usethis/0000755000176200001440000000000014721320322011732 5ustar liggesusersusethis/tests/0000755000176200001440000000000014721173103013077 5ustar liggesusersusethis/tests/testthat/0000755000176200001440000000000014721320322014734 5ustar liggesusersusethis/tests/testthat/test-ui-legacy.R0000644000176200001440000000156614651000165017725 0ustar liggesuserstest_that("basic legacy UI actions behave as expected", { # suppress test silencing withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot({ ui_line("line") ui_todo("to do") ui_done("done") ui_oops("oops") ui_info("info") ui_code_block(c("x <- 1", "y <- 2")) ui_warn("a warning") }) }) test_that("legacy UI actions respect usethis.quiet = TRUE", { withr::local_options(list(usethis.quiet = TRUE)) expect_no_message({ ui_line("line") ui_todo("to do") ui_done("done") ui_oops("oops") ui_info("info") ui_code_block(c("x <- 1", "y <- 2")) }) }) test_that("ui_stop() works", { expect_usethis_error(ui_stop("an error"), "an error") }) test_that("ui_silence() suppresses output", { # suppress test silencing withr::local_options(list(usethis.quiet = FALSE)) expect_output(ui_silence(ui_line()), NA) }) usethis/tests/testthat/test-template.R0000644000176200001440000000147714651000165017662 0ustar liggesuserstest_that("can leave existing file unchanged, without an error", { create_local_package() desc_lines_before <- read_utf8(proj_path("DESCRIPTION")) expect_no_error( use_template("NEWS.md", "DESCRIPTION") ) desc_lines_after <- read_utf8(proj_path("DESCRIPTION")) expect_identical(desc_lines_before, desc_lines_after) }) # helpers ----------------------------------------------------------------- test_that("find_template errors if template missing", { expect_usethis_error(find_template("xxx"), "Could not find template") }) test_that("find_template can find templates for tricky Rbuildignored files", { expect_match(find_template("codecov.yml"), "codecov\\.yml$") expect_match(find_template("cran-comments.md"), "cran-comments\\.md$") expect_match(find_template("template.Rproj"), "template\\.Rproj$") }) usethis/tests/testthat/test-code-of-conduct.R0000644000176200001440000000025214651000165021006 0ustar liggesuserstest_that("use_code_of_conduct() creates promised file", { create_local_project() use_code_of_conduct("test@example.com") expect_proj_file("CODE_OF_CONDUCT.md") }) usethis/tests/testthat/test-utils.R0000644000176200001440000000167614651000165017210 0ustar liggesuserstest_that("check_is_named_list() works", { l <- list(a = "a", b = 2, c = letters) expect_identical(l, check_is_named_list(l)) user_facing_function <- function(somevar) { check_is_named_list(somevar) } expect_snapshot(error = TRUE, user_facing_function(NULL)) expect_snapshot(error = TRUE, user_facing_function(c(a = "a", b = "b"))) expect_snapshot(error = TRUE, user_facing_function(list("a", b = 2))) }) test_that("asciify() substitutes non-ASCII but respects case", { expect_identical(asciify("aB!d$F+_h"), "aB-d-F-_h") }) test_that("path_first_existing() works", { create_local_project() all_3_files <- proj_path(c("alfa", "bravo", "charlie")) expect_null(path_first_existing(all_3_files)) write_utf8(proj_path("charlie"), "charlie") expect_equal(path_first_existing(all_3_files), proj_path("charlie")) write_utf8(proj_path("bravo"), "bravo") expect_equal(path_first_existing(all_3_files), proj_path("bravo")) }) usethis/tests/testthat/test-use_standalone.R0000644000176200001440000000760514717524721021066 0ustar liggesuserstest_that("standalone_header() works with various inputs", { 
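  # Snapshot the generated header for each combination of repo spec, ref, and host.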
expect_snapshot( standalone_header("OWNER/REPO", "R/standalone-foo.R") ) expect_snapshot( standalone_header("OWNER/REPO", "R/standalone-foo.R", ref = "blah") ) expect_snapshot( standalone_header( "OWNER/REPO", "R/standalone-foo.R", host = "https://github.com" ) ) expect_snapshot( standalone_header( "OWNER/REPO", "R/standalone-foo.R", host = "https://github.acme.com" ) ) expect_snapshot( standalone_header( "OWNER/REPO", "R/standalone-foo.R", ref = "blah", host = "https://github.com" ) ) expect_snapshot( standalone_header( "OWNER/REPO", "R/standalone-foo.R", ref = "blah", host = "https://github.acme.com" ) ) }) test_that("can import standalone file with dependencies", { skip_if_offline("github.com") create_local_package() # NOTE: Check ref after r-lib/rlang@standalone-dep has been merged use_standalone("r-lib/rlang", "types-check", ref = "73182fe94") expect_setequal( as.character(path_rel(dir_ls(proj_path("R"))), proj_path()), c("R/import-standalone-types-check.R", "R/import-standalone-obj-type.R") ) desc <- proj_desc() imports <- proj_desc()$get_field("Imports") expect_length(imports, 1) expect_match(imports, "rlang") }) test_that("can use full github url", { skip_if_offline("github.com") create_local_package() use_standalone( "https://github.com/r-lib/rlang", file = "sizes", ref = "4670cb233ecc8d11" ) expect_equal( as.character(path_rel(dir_ls(proj_path("R"))), proj_path()), "R/import-standalone-sizes.R" ) }) test_that("can offer choices", { skip_if_offline("github.com") expect_snapshot(error = TRUE, { standalone_choose("tidyverse/forcats", ref = "v1.0.0") standalone_choose("r-lib/rlang", ref = "4670cb233ecc8d11") }) }) test_that("can extract dependencies", { extract_deps <- function(deps) { out <- standalone_dependencies(c("# ---", deps, "# ---"), "test.R") out$deps } expect_equal(extract_deps(NULL), character()) expect_equal(extract_deps("# dependencies: a"), "a") expect_equal(extract_deps("# dependencies: [a, b]"), c("a", "b")) }) test_that("can extract imports", { extract_imports <- function(imports) { out <- standalone_dependencies( c("# ---", imports, "# ---"), "test.R", error_call = current_env() ) out$imports } expect_equal( extract_imports(NULL), version_info_df() ) expect_equal( extract_imports("# imports: rlang"), version_info_df("rlang", NA, NA) ) expect_equal( extract_imports("# imports: rlang (>= 1.0.0)"), version_info_df("rlang", ">=", "1.0.0") ) expect_equal( extract_imports("# imports: [rlang (>= 1.0.0), purrr]"), version_info_df(c("rlang", "purrr"), c(">=", NA), c("1.0.0", NA)) ) expect_snapshot(error = TRUE, { extract_imports("# imports: rlang (== 1.0.0)") extract_imports("# imports: rlang (>= 1.0.0), purrr") extract_imports("# imports: foo (>=0.0.0)") }) }) test_that("errors on malformed dependencies", { expect_snapshot(error = TRUE, { standalone_dependencies(c(), "test.R") standalone_dependencies(c("# ---", "# dependencies: 1", "# ---"), "test.R") }) }) test_that("standalone file is normalised", { expect_equal(as_standalone_file("foo"), "standalone-foo.R") expect_equal(as_standalone_file("standalone-foo"), "standalone-foo.R") expect_equal(as_standalone_file("standalone-foo.R"), "standalone-foo.R") expect_equal(as_standalone_file("aaa-standalone-foo"), "aaa-standalone-foo.R") expect_equal(as_standalone_file("aaa-standalone-foo.R"), "aaa-standalone-foo.R") }) test_that("standalone destination file is normalised", { expect_equal(as_standalone_dest_file("standalone-foo.R"), "import-standalone-foo.R") expect_equal(as_standalone_dest_file("aaa-standalone-foo.R"), 
"aaa-import-standalone-foo.R") }) usethis/tests/testthat/test-use_github_file.R0000644000176200001440000000312714651000165021176 0ustar liggesuserstest_that("parse_file_url() works when it should", { expected <- list( parsed = TRUE, repo_spec = "OWNER/REPO", path = "path/to/some/file", ref = "REF", host = "https://github.com" ) expect_equal( parse_file_url("https://github.com/OWNER/REPO/blob/REF/path/to/some/file"), expected ) expect_equal( parse_file_url("https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"), expected ) expected$path <- "file" expect_equal( parse_file_url("https://github.com/OWNER/REPO/blob/REF/file"), expected ) expect_equal( parse_file_url("https://github.com/OWNER/REPO/blob/REF/file"), parse_file_url("https://raw.githubusercontent.com/OWNER/REPO/REF/file") ) expected$host <- "https://github.acme.com" expect_equal( parse_file_url("https://github.acme.com/OWNER/REPO/blob/REF/file"), expected ) expect_equal( parse_file_url("https://raw.github.acme.com/OWNER/REPO/REF/file"), expected ) }) test_that("parse_file_url() gives up when it should", { out <- parse_file_url("OWNER/REPO") expect_false(out$parsed) }) test_that("parse_file_url() errors when it should", { expect_error(parse_file_url("https://github.com/OWNER/REPO")) expect_error(parse_file_url("https://github.com/OWNER/REPO.git")) expect_error(parse_file_url("https://github.com/OWNER/REPO/commit/abcdefg")) expect_error(parse_file_url("https://github.com/OWNER/REPO/releases/tag/vx.y.z")) expect_error(parse_file_url("https://github.com/OWNER/REPO/tree/BRANCH")) expect_error(parse_file_url("https://gitlab.com/OWNER/REPO/path/to/file")) }) usethis/tests/testthat/helper-mocks.R0000644000176200001440000000247414717524762017502 0ustar liggesuserslocal_cran_version <- function(version, .env = caller_env()) { local_mocked_bindings(cran_version = function() version, .env = .env) } local_check_installed <- function(.env = caller_env()) { local_mocked_bindings(check_installed = function(...) NULL, .env = .env) } local_rstudio_available <- function(val, .env = caller_env()) { local_mocked_bindings(rstudio_available = function(...) val, .env = .env) } local_target_repo_spec <- function(spec, .env = caller_env()) { local_mocked_bindings(target_repo_spec = function(...) spec, .env = .env) } local_roxygen_update_ns <- function(.env = caller_env()) { local_mocked_bindings(roxygen_update_ns = function(...) NULL, .env = .env) } local_check_fun_exists <- function(.env = caller_env()) { local_mocked_bindings(check_fun_exists = function(...) NULL, .env = .env) } local_ui_yep <- function(.env = caller_env()) { local_mocked_bindings(ui_yep = function(...) 
TRUE, .env = .env) } local_git_default_branch_remote <- function(.env = caller_env()) { local_mocked_bindings( git_default_branch_remote = function(cfg, remote) { list( name = remote, is_configured = TRUE, url = NA_character_, repo_spec = NA_character_, default_branch = as.character(glue("default-branch-of-{remote}")) ) }, .env = .env ) } usethis/tests/testthat/test-release.R0000644000176200001440000001634514717524721017503 0ustar liggesusers # release bullets --------------------------------------------------------- test_that("release bullets don't change accidentally", { withr::local_options(usethis.description = NULL) create_local_package() local_mocked_bindings( get_revdeps = function() "usethis" ) # First release expect_snapshot( writeLines(release_checklist("0.1.0", on_cran = FALSE)), transform = scrub_testpkg ) # Patch release expect_snapshot( writeLines(release_checklist("0.0.1", on_cran = TRUE)), transform = scrub_testpkg ) # Major release expect_snapshot( writeLines(release_checklist("1.0.0", on_cran = TRUE)), transform = scrub_testpkg ) }) test_that("non-patch + lifecycle = advanced deprecation process", { withr::local_options(usethis.description = NULL) create_local_package() use_package("lifecycle") local_mocked_bindings(tidy_minimum_r_version = function() "3.6") has_deprecation <- function(x) any(grepl("deprecation processes", x)) expect_true(has_deprecation(release_checklist("1.0.0", on_cran = TRUE))) expect_true(has_deprecation(release_checklist("1.1.0", on_cran = TRUE))) expect_false(has_deprecation(release_checklist("1.1.1", on_cran = TRUE))) }) test_that("get extra news bullets if available", { env <- env(release_bullets = function() "Extra bullets") expect_equal(release_extra_bullets(env), "* [ ] Extra bullets") env <- env(release_questions = function() "Extra bullets") expect_equal(release_extra_bullets(env), "* [ ] Extra bullets") env <- env() expect_equal(release_extra_bullets(env), character()) }) test_that("construct correct revdep bullet", { create_local_package() env <- env(release_extra_revdeps = function() c("waldo", "testthat")) local_mocked_bindings( get_revdeps = function() "usethis" ) expect_snapshot({ release_revdepcheck(on_cran = FALSE) release_revdepcheck(on_cran = TRUE, is_posit_pkg = FALSE) release_revdepcheck(on_cran = TRUE, is_posit_pkg = TRUE) release_revdepcheck(on_cran = TRUE, is_posit_pkg = TRUE, env = env) }) }) test_that("RStudio-ness detection works", { withr::local_options(usethis.description = NULL) create_local_package() local_mocked_bindings( tidy_minimum_r_version = function() numeric_version("3.6"), get_revdeps = function() "usethis" ) expect_false(is_posit_pkg()) desc <- proj_desc() desc$add_author(given = "PoSiT, PbC", role = "fnd") desc$add_author(given = "someone", email = "someone@Rstudio.com") desc$add_urls("https://github.com/tidyverse/WHATEVER") desc$set_dep("R", "Depends", version = ">= 3.4") desc$write() expect_true(is_posit_pkg()) expect_true(is_in_posit_org()) expect_false(is_posit_person_canonical()) expect_true(author_has_rstudio_email()) expect_snapshot( writeLines(release_checklist("1.0.0", on_cran = TRUE)), transform = scrub_testpkg ) }) test_that("can find milestone numbers", { skip_if_offline("github.com") tr <- list( repo_owner = "r-lib", repo_name = "usethis", api_url = "https://api.github.com" ) expect_equal( gh_milestone_number(tr, "2.1.6", state = "all"), 8 ) expect_equal( gh_milestone_number(tr, "0.0.0", state = "all"), NA_integer_ ) }) test_that("gh_milestone_number() returns NA when gh() errors", { 
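  # Mock gh_tr() so the returned GitHub API helper always errors, simulating a failed request.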
local_mocked_bindings( gh_tr = function(tr) { function(endpoint, ...) { ui_abort("nope!") } } ) tr <- list( repo_owner = "r-lib", repo_name = "usethis", api_url = "https://api.github.com" ) expect_true(is.na(gh_milestone_number(tr, "1.1.1"))) }) # news -------------------------------------------------------------------- test_that("must have at least one heading", { expect_error( news_latest(""), regexp = "No top-level headings", class = "usethis_error" ) }) test_that("trims blank lines when extracting bullets", { lines <- c( "# Heading", "", "Contents", "" ) expect_equal(news_latest(lines), "Contents\n") lines <- c( "# Heading", "", "Contents 1", "", "# Heading", "", "Contents 2" ) expect_equal(news_latest(lines), "Contents 1\n") }) test_that("returns empty string if no bullets", { lines <- c( "# Heading", "", "# Heading" ) expect_equal(news_latest(lines), "") }) # draft release ---------------------------------------------------------------- test_that("get_release_data() works if no file found", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore") gert::git_commit("we need at least one commit") res <- get_release_data() expect_equal(res$Version, "0.0.0.9000") expect_match(res$SHA, "[[:xdigit:]]{40}") }) test_that("get_release_data() works for old-style CRAN-RELEASE", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore") gert::git_commit("we need at least one commit") HEAD <- gert::git_info(repo = git_repo())$commit write_utf8( proj_path("CRAN-RELEASE"), glue(" This package was submitted to CRAN on YYYY-MM-DD. Once it is accepted, delete this file and tag the release (commit {HEAD}).") ) res <- get_release_data(tr = list(repo_spec = "OWNER/REPO")) expect_equal(res$Version, "0.0.0.9000") expect_equal(res$SHA, HEAD) expect_equal(path_file(res$file), "CRAN-RELEASE") }) test_that("get_release_data() works for new-style CRAN-RELEASE", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore") gert::git_commit("we need at least one commit") HEAD <- gert::git_info(repo = git_repo())$commit write_utf8( proj_path("CRAN-SUBMISSION"), glue(" Version: 1.2.3 Date: 2021-10-14 23:57:41 UTC SHA: {HEAD}") ) res <- get_release_data(tr = list(repo_spec = "OWNER/REPO")) expect_equal(res$Version, "1.2.3") expect_equal(res$SHA, HEAD) expect_equal(path_file(res$file), "CRAN-SUBMISSION") }) test_that("cran_version() returns package version if package found", { local_mocked_bindings(available.packages = function(...) { # simulate minimal available.packages entry as.matrix(data.frame(Package = c(usethis = "usethis"), Version = "1.0.0")) }) expect_null(cran_version("doesntexist")) expect_equal(cran_version("usethis"), package_version("1.0.0")) }) test_that("cran_version() returns NULL if no available packages", { local_mocked_bindings(available.packages = function(...) 
NULL) expect_null(cran_version("doesntexist")) }) test_that("default_cran_mirror() is respects set value but falls back to cloud", { withr::local_options(repos = c(CRAN = "https://example.com")) expect_equal(default_cran_mirror(), c(CRAN = "https://example.com")) withr::local_options(repos = c(CRAN = "@CRAN@")) expect_equal(default_cran_mirror(), c(CRAN = "https://cloud.r-project.org")) withr::local_options(repos = c()) expect_equal(default_cran_mirror(), c(CRAN = "https://cloud.r-project.org")) }) test_that("no revdep release bullets when there are no revdeps", { withr::local_options(usethis.description = NULL) create_local_package() local_mocked_bindings( get_revdeps = function() NULL ) expect_snapshot( writeLines(release_checklist("1.0.0", on_cran = TRUE)), transform = scrub_testpkg ) }) usethis/tests/testthat/test-make.R0000644000176200001440000000021414651000165016750 0ustar liggesuserstest_that("use_make() creates a Makefile at project root", { pkg <- create_local_package() use_make() expect_proj_file("Makefile") }) usethis/tests/testthat/test-utils-glue.R0000644000176200001440000000065014651000165020131 0ustar liggesuserstest_that("glue_chr() returns plain character, evals in correct env", { x <- letters[1:2] y <- LETTERS[25:26] f <- toupper expect_identical(glue_chr("{f(x)}-{y}"), c("A-Y", "B-Z")) }) test_that("glue_data_chr() returns plain character, evals in correct env", { z <- list(x = letters[1:2], y = LETTERS[25:26]) f <- tolower x <- 1 y <- 2 expect_identical(glue_data_chr(z, "{x}-{f(y)}"), c("a-y", "b-z")) }) usethis/tests/testthat/test-jenkins.R0000644000176200001440000000030614651000165017476 0ustar liggesuserstest_that("use_jenkins() creates a Makefile AND a Jenkinsfile at project root", { pkg <- create_local_package() use_jenkins() expect_proj_file("Makefile") expect_proj_file("Jenkinsfile") }) usethis/tests/testthat/test-create.R0000644000176200001440000001134414655570272017323 0ustar liggesuserstest_that("create_package() creates a package", { dir <- create_local_package() expect_true(possibly_in_proj(dir)) expect_true(is_package(dir)) }) test_that("create_project() creates a non-package project", { dir <- create_local_project() expect_true(possibly_in_proj(dir)) expect_false(is_package(dir)) }) test_that("create_*(open = FALSE) returns path to new proj, restores active proj", { path <- file_temp() cur_proj <- proj_get_() out_path <- create_package(path, open = FALSE) expect_equal(proj_get_(), cur_proj) expect_equal(proj_path_prep(path), out_path) dir_delete(out_path) out_path <- create_project(path, open = FALSE) expect_equal(proj_get_(), cur_proj) expect_equal(proj_path_prep(path), out_path) dir_delete(out_path) }) test_that("nested package is disallowed, by default", { dir <- create_local_package() expect_usethis_error(create_package(path(dir, "abcde")), "anyway") }) test_that("nested project is disallowed, by default", { dir <- create_local_project() expect_usethis_error(create_project(path(dir, "abcde")), "anyway") }) test_that("nested package can be created if user really, really wants to", { parent <- create_local_package() child_path <- path(parent, "fghijk") # since user can't approve interactively, use the backdoor withr::local_options("usethis.allow_nested_project" = TRUE) child_result <- create_package(child_path) expect_equal(child_path, child_result) expect_true(possibly_in_proj(child_path)) expect_true(is_package(child_path)) expect_equal(project_name(child_path), "fghijk") }) test_that("nested project can be created if user really, really wants to", { 
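  # Same idea as the nested package test above, but for a plain (non-package) project.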
parent <- create_local_project() child_path <- path(parent, "fghijk") # since user can't approve interactively, use the backdoor withr::local_options("usethis.allow_nested_project" = TRUE) child_result <- create_project(child_path) expect_equal(child_path, child_result) expect_true(possibly_in_proj(child_path)) expect_equal(project_name(child_path), "fghijk") }) test_that("can create package in current directory (literally in '.')", { target_path <- dir_create(file_temp("mypackage")) withr::defer(dir_delete(target_path)) withr::local_dir(target_path) orig_proj <- proj_get_() orig_wd <- path_wd() expect_no_error( out_path <- create_package(".", open = FALSE) ) expect_equal(path_wd(), orig_wd) expect_equal(proj_get_(), orig_proj) }) ## https://github.com/r-lib/usethis/issues/227 test_that("create_* works w/ non-existing rel path, open = FALSE case", { sandbox <- path_real(dir_create(file_temp("sandbox"))) orig_proj <- proj_get_() orig_wd <- path_wd() withr::defer(dir_delete(sandbox)) withr::defer(proj_set(orig_proj, force = TRUE)) withr::local_dir(sandbox) rel_path_pkg <- path_file(file_temp(pattern = "abc")) expect_no_error( out_path <- create_package(rel_path_pkg, open = FALSE) ) expect_true(dir_exists(rel_path_pkg)) expect_equal(out_path, proj_path_prep(rel_path_pkg)) expect_equal(proj_get_(), orig_proj) expect_equal(path_wd(), sandbox) rel_path_proj <- path_file(file_temp(pattern = "def")) expect_no_error( out_path <- create_project(rel_path_proj, open = FALSE) ) expect_true(dir_exists(rel_path_proj)) expect_equal(out_path, proj_path_prep(rel_path_proj)) expect_equal(proj_get_(), orig_proj) expect_equal(path_wd(), sandbox) }) # https://github.com/r-lib/usethis/issues/1122 test_that("create_*() works w/ non-existing rel path, open = TRUE, not in RStudio", { sandbox <- path_real(dir_create(file_temp("sandbox"))) orig_proj <- proj_get_() withr::defer(dir_delete(sandbox)) withr::defer(proj_set(orig_proj, force = TRUE)) withr::local_dir(sandbox) local_rstudio_available(FALSE) # package rel_path_pkg <- path_file(file_temp(pattern = "ghi")) expect_no_error( out_path <- create_package(rel_path_pkg, open = TRUE) ) exp_path_pkg <- path(sandbox, rel_path_pkg) expect_equal(out_path, exp_path_pkg) expect_equal(path_wd(), out_path) expect_equal(proj_get(), out_path) setwd(sandbox) # project rel_path_proj <- path_file(file_temp(pattern = "jkl")) expect_no_error( out_path <- create_project(rel_path_proj, open = TRUE) ) exp_path_proj <- path(sandbox, rel_path_proj) expect_equal(out_path, exp_path_proj) expect_equal(path_wd(), out_path) expect_equal(proj_get(), out_path) }) test_that("we discourage project creation in home directory", { local_interactive(FALSE) expect_usethis_error(create_package(path_home()), "create anyway") expect_usethis_error(create_project(path_home()), "create anyway") if (is_windows()) { expect_usethis_error(create_package(path_home_r()), "create anyway") expect_usethis_error(create_project(path_home_r()), "create anyway") } }) usethis/tests/testthat/test-version.R0000644000176200001440000000505614651000165017531 0ustar liggesuserstest_that("bump_version() presents all possible incremented versions", { expect_identical( bump_version("1.1.1.9000"), c(major = "2.0.0", minor = "1.2.0", patch = "1.1.2", dev = "1.1.1.9001") ) }) test_that("use_version() and use_dev_version() require a package", { create_local_project() expect_usethis_error(use_version("major"), "not an R package") expect_usethis_error(use_dev_version(), "not an R package") }) test_that("use_version() errors for invalid 
`which`", { create_local_package() expect_snapshot(error = TRUE, use_version("1.2.3")) }) test_that("use_version() increments version in DESCRIPTION, edits NEWS", { create_local_package() proj_desc_field_update( key = "Version", value = "1.1.1.9000", overwrite = TRUE ) local_cran_version("1.1.1") use_news_md() use_version("major") expect_identical(proj_version(), "2.0.0") expect_snapshot( writeLines(read_utf8(proj_path("NEWS.md"))), transform = scrub_testpkg ) }) test_that("use_dev_version() appends .9000 to Version, exactly once", { create_local_package() proj_desc_field_update(key = "Version", value = "0.0.1", overwrite = TRUE) use_dev_version() expect_identical(proj_version(), "0.0.1.9000") use_dev_version() expect_identical(proj_version(), "0.0.1.9000") }) test_that("use_version() updates (development version) directly", { create_local_package() proj_desc_field_update(key = "Version", value = "0.0.1", overwrite = TRUE) local_cran_version("0.0.1") use_news_md() # bump to dev to set (development version) use_dev_version() # directly overwrite development header use_version("patch") expect_snapshot( writeLines(read_utf8(proj_path("NEWS.md"))), transform = scrub_testpkg ) }) test_that("use_version() updates version.c", { create_local_package() proj_desc_field_update(key = "Version", value = "1.0.0", overwrite = TRUE) name <- project_name() src_path <- proj_path("src") ver_path <- path(src_path, "version.c") dir_create(src_path) write_utf8(ver_path, glue(' foo; const char {name}_version = "1.0.0"; bar;')) use_dev_version() lines <- read_utf8(ver_path) expect_snapshot(writeLines(lines), transform = scrub_testpkg) }) test_that("is_dev_version() detects dev version directly and with DESCRIPTION", { expect_true(is_dev_version("0.0.1.9000")) expect_false(is_dev_version("0.0.1")) create_local_package() proj_desc_field_update(key = "Version", value = "1.0.0", overwrite = TRUE) expect_false(is_dev_version()) use_dev_version() expect_true(is_dev_version()) }) usethis/tests/testthat/test-usethis-deprecated.R0000644000176200001440000000024614717524721021636 0ustar liggesuserstest_that("use_tidy_eval() is deprecated", { skip_if_not_installed("roxygen2") pkg <- create_local_package() expect_snapshot(use_tidy_eval(), error = TRUE) }) usethis/tests/testthat/test-addin.R0000644000176200001440000000066414651000165017123 0ustar liggesuserstest_that("use_addin() creates the first addins.dcf as promised", { create_local_package() use_addin("addin.test") addin_dcf <- read_utf8(proj_path("inst", "rstudio", "addins.dcf")) expected_file <- path_package("usethis", "templates", "addins.dcf") addin_dcf_expected <- read_utf8(expected_file) addin_dcf_expected[3] <- "Binding: addin.test" addin_dcf_expected[5] <- "" expect_equal(addin_dcf, addin_dcf_expected) }) usethis/tests/testthat/test-pipe.R0000644000176200001440000000113214651000165016770 0ustar liggesuserstest_that("use_pipe() requires a package", { create_local_project() expect_usethis_error(use_pipe(), "not an R package") }) test_that("use_pipe(export = TRUE) adds promised file, Imports magrittr", { create_local_package() use_pipe(export = TRUE) expect_equal(desc::desc_get_field("Imports"), "magrittr") expect_proj_file("R", "utils-pipe.R") }) test_that("use_pipe(export = FALSE) adds roxygen to package doc", { create_local_package() use_package_doc() use_pipe(export = FALSE) expect_equal(desc::desc_get_field("Imports"), "magrittr") expect_snapshot(roxygen_ns_show()) }) usethis/tests/testthat/test-pkgdown.R0000644000176200001440000000464514651000165017520 0ustar 
liggesuserstest_that("use_pkgdown() requires a package", { create_local_project() expect_usethis_error(use_pkgdown(), "not an R package") }) test_that("use_pkgdown() creates and ignores the promised file/dir", { create_local_package() local_interactive(FALSE) local_check_installed() local_mocked_bindings(pkgdown_version = function() "1.9000") withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_pkgdown() ) expect_true(uses_pkgdown()) expect_true(is_build_ignored("^_pkgdown\\.yml$")) expect_true(is_build_ignored("^docs$")) }) # pkgdown helpers ---- test_that("pkgdown helpers behave in the absence of pkgdown", { create_local_package() expect_null(pkgdown_config_path()) expect_false(uses_pkgdown()) expect_equal(pkgdown_config_meta(), list()) expect_null(pkgdown_url()) }) test_that("pkgdown_config_meta() returns a list", { create_local_package() local_interactive(FALSE) local_check_installed() local_mocked_bindings(pkgdown_version = function() "1.9000") use_pkgdown() expect_type(pkgdown_config_meta(), "list") writeLines(c("home:", " strip_header: true"), pkgdown_config_path()) expect_equal( pkgdown_config_meta(), list(home = list(strip_header = TRUE)) ) }) test_that("pkgdown_url() returns correct data, warns if pedantic", { create_local_package() local_interactive(FALSE) local_check_installed() local_mocked_bindings(pkgdown_version = function() "1.9000") use_pkgdown() # empty config expect_null(pkgdown_url()) expect_silent(pkgdown_url()) withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot( pkgdown_url(pedantic = TRUE) ) # nonempty config, but no url writeLines(c("home:", " strip_header: true"), pkgdown_config_path()) expect_null(pkgdown_url()) expect_silent(pkgdown_url()) expect_snapshot( pkgdown_url(pedantic = TRUE) ) # config has url writeLines("url: https://usethis.r-lib.org", pkgdown_config_path()) expect_equal(pkgdown_url(), "https://usethis.r-lib.org") # config has url with trailing slash writeLines("url: https://malcolmbarrett.github.io/tidysmd/", pkgdown_config_path()) expect_equal(pkgdown_url(), "https://malcolmbarrett.github.io/tidysmd/") }) test_that("tidyverse_url() leaves trailing slash alone, almost always", { url <- "https://malcolmbarrett.github.io/tidysmd/" out <- tidyverse_url(url, tr = list(repo_name = "REPO", repo_owner = "OWNER")) expect_equal(out, url) }) usethis/tests/testthat/test-news.R0000644000176200001440000000250514651514262017024 0ustar liggesuserstest_that("use_news_md() sets (development version)/'Initial submission' in new pkg", { create_local_package() local_cran_version(NULL) use_news_md() expect_snapshot(writeLines(read_utf8(proj_path("NEWS.md"))), transform = scrub_testpkg) }) test_that("use_news_md() sets bullet to 'Added a NEWS.md file...' 
when on CRAN", { create_local_package() # on CRAN, local dev version proj_desc_field_update(key = "Version", value = "0.1.0.9000") local_cran_version("0.1.0") use_news_md() expect_snapshot(writeLines(read_utf8(proj_path("NEWS.md"))), transform = scrub_testpkg) }) test_that("use_news_md() sets version number when 'production version'", { create_local_package() proj_desc_field_update(key = "Version", value = "0.2.0") local_cran_version(NULL) use_news_md() expect_snapshot(writeLines(read_utf8(proj_path("NEWS.md"))), transform = scrub_testpkg) }) test_that("use_news_heading() tolerates blank lines at start", { create_local_package() header <- sprintf("# %s (development version)", project_name()) writeLines(c("", header, "", "* Fixed the bugs."), con = "NEWS.md") use_news_heading(version = "1.0.0") contents <- read_utf8("NEWS.md") expected <- sprintf("# %s 1.0.0", project_name()) expect_equal(contents[[2L]], expected) }) usethis/tests/testthat/test-course.R0000644000176200001440000002552014717524721017356 0ustar liggesusers## download_url ---- test_that("download_url() retry logic works as advertised", { faux_download <- function(n_failures) { i <- 0 function(url, destfile, quiet, mode, handle) { i <<- i + 1 if (i <= n_failures) simpleError(paste0("try ", i)) else "success" } } withr::local_options(list(usethis.quiet = FALSE)) # succeed on first try local_mocked_bindings( try_download = faux_download(0) ) expect_snapshot(out <- download_url(url = "URL", destfile = "destfile")) expect_s3_class(out, "curl_handle") # fail, then succeed local_mocked_bindings( try_download = faux_download(1) ) expect_snapshot(out <- download_url(url = "URL", destfile = "destfile")) expect_s3_class(out, "curl_handle") # fail, fail, then succeed (default n_tries = 3, so should allow) local_mocked_bindings( try_download = faux_download(2) ) expect_snapshot(out <- download_url(url = "URL", destfile = "destfile")) expect_s3_class(out, "curl_handle") # fail, fail, fail (exceed n_failures > n_tries = 3) local_mocked_bindings( try_download = faux_download(5) ) expect_snapshot( out <- download_url(url = "URL", destfile = "destfile", n_tries = 3), error = TRUE ) # fail, fail, fail, succeed (make sure n_tries is adjustable) local_mocked_bindings( try_download = faux_download(3) ) expect_snapshot(out <- download_url(url = "URL", destfile = "destfile", n_tries = 10)) expect_s3_class(out, "curl_handle") }) ## tidy_download ---- test_that("tidy_download() errors early if destdir is not a directory", { tmp <- fs::path_temp("I_am_just_a_file") withr::defer(fs::file_delete(tmp)) expect_usethis_error(tidy_download("URL", destdir = tmp), "does not exist") fs::file_create(tmp) expect_usethis_error(tidy_download("URL", destdir = tmp), "not a directory") }) test_that("tidy_download() works", { skip_if_offline("github.com") tmp <- withr::local_tempdir(pattern = "tidy-download-test-") gh_url <- "https://github.com/r-lib/rematch2/archive/main.zip" expected <- fs::path(tmp, "rematch2-main.zip") capture.output( out <- tidy_download(gh_url, destdir = tmp) ) expect_true(fs::file_exists(expected)) expect_identical(out, expected, ignore_attr = TRUE) expect_identical(attr(out, "content-type"), "application/zip") # refuse to overwrite when non-interactive expect_error(capture.output( tidy_download(gh_url, destdir = tmp) )) }) ## tidy_unzip ---- test_that("tidy_unzip(): explicit parent, file example", { local_interactive(FALSE) zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("foo-explicit-parent.zip"), zipfile) dest <- 
tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), "foo") explicit_parent_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_equal(explicit_parent_files, "file.txt") }) test_that("tidy_unzip(): explicit parent, folders example", { local_interactive(FALSE) files <- c("subdir1", "file1.txt", "subdir2", "file2.txt") zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("yo-explicit-parent.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), "yo") explicit_parent_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_setequal(explicit_parent_files, files) }) test_that("tidy_unzip(): implicit parent, file example", { local_interactive(FALSE) zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("foo-implicit-parent.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), "foo") implicit_parent_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_equal(implicit_parent_files, "file.txt") }) test_that("tidy_unzip(): implicit parent, folders example", { local_interactive(FALSE) files <- c("subdir1", "file1.txt", "subdir2", "file2.txt") zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("yo-implicit-parent.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), "yo") implicit_parent_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_setequal(implicit_parent_files, files) }) test_that("tidy_unzip(): no parent, file example", { local_interactive(FALSE) zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("foo-no-parent.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), path_ext_remove(path_file(zipfile))) no_parent_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_setequal(no_parent_files, "file.txt") }) test_that("tidy_unzip(): no parent, folders example", { local_interactive(FALSE) files <- c("subdir1", "file1.txt", "subdir2", "file2.txt") zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("yo-no-parent.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), path_ext_remove(path_file(zipfile))) no_parent_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_setequal(no_parent_files, files) }) test_that("tidy_unzip(): DropBox, file example", { local_interactive(FALSE) zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("foo-loose-dropbox.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), path_ext_remove(path_file(zipfile))) loose_dropbox_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_setequal(loose_dropbox_files, "file.txt") }) test_that("tidy_unzip(): DropBox, folders example", { local_interactive(FALSE) files <- c("subdir1", "file1.txt", "subdir2", "file2.txt") zipfile <- withr::local_tempfile(fileext = ".zip") file_copy(test_file("yo-loose-dropbox.zip"), zipfile) dest <- tidy_unzip(zipfile) withr::defer(dir_delete(dest)) expect_equal(path_file(dest), path_ext_remove(path_file(zipfile))) loose_dropbox_files <- path_file(dir_ls(dest, recurse = TRUE)) expect_setequal(loose_dropbox_files, files) }) test_that("path_before_slash() works", { expect_equal(path_before_slash(""), "") expect_equal(path_before_slash("/"), "") expect_equal(path_before_slash("a/"), "a") expect_equal(path_before_slash("a/b"), "a") 
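  # Deeper nesting still yields only the first path component.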
expect_equal(path_before_slash("a/b/c"), "a") expect_equal(path_before_slash("a/b/c/"), "a") }) ## helpers ---- test_that("create_download_url() works", { expect_equal( create_download_url("https://rstudio.com"), "https://rstudio.com" ) expect_equal( create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz"), "https://drive.google.com/uc?export=download&id=123456789xxyyyzzz" ) expect_equal( create_download_url( "https://drive.google.com/file/d/123456789xxxyyyzzz/view" ), "https://drive.google.com/uc?export=download&id=123456789xxxyyyzzz" ) expect_equal( create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0"), "https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1" ) # GitHub usethis_url <- "https://github.com/r-lib/usethis/zipball/HEAD" expect_equal( create_download_url("https://github.com/r-lib/usethis"), usethis_url ) expect_equal( create_download_url("https://github.com/r-lib/usethis/issues"), usethis_url ) expect_equal( create_download_url("https://github.com/r-lib/usethis#readme"), usethis_url ) }) test_that("normalize_url() prepends https:// (or not)", { expect_snapshot(normalize_url(1), error = TRUE) expect_identical(normalize_url("http://bit.ly/abc"), "http://bit.ly/abc") expect_identical(normalize_url("bit.ly/abc"), "https://bit.ly/abc") expect_identical( normalize_url("https://github.com/r-lib/rematch2/archive/main.zip"), "https://github.com/r-lib/rematch2/archive/main.zip" ) expect_identical( normalize_url("https://rstd.io/usethis-src"), "https://rstd.io/usethis-src" ) expect_identical( normalize_url("rstd.io/usethis-src"), "https://rstd.io/usethis-src" ) }) test_that("shortlinks pass through", { url1 <- "bit.ly/usethis-shortlink-example" url2 <- "rstd.io/usethis-shortlink-example" expect_equal(normalize_url(url1), paste0("https://", url1)) expect_equal(normalize_url(url2), paste0("https://", url2)) expect_equal(normalize_url(paste0("https://", url1)), paste0("https://", url1)) expect_equal(normalize_url(paste0("http://", url1)), paste0("http://", url1)) }) test_that("github links get expanded", { expect_equal( normalize_url("OWNER/REPO"), "https://github.com/OWNER/REPO/zipball/HEAD" ) }) test_that("conspicuous_place() returns a writeable directory", { skip_on_cran_macos() # even $HOME is not writeable on CRAN macOS builder expect_no_error(x <- conspicuous_place()) expect_true(is_dir(x)) expect_true(file_access(x, mode = "write")) }) test_that("conspicuous_place() uses `usethis.destdir` when set", { destdir <- withr::local_tempdir(pattern = "destdir_temp") withr::local_options(list(usethis.destdir = destdir)) expect_no_error(x <- conspicuous_place()) expect_equal(path_tidy(destdir), x) }) test_that("use_course() errors if MIME type is not 'application/zip'", { skip_if_offline() path <- withr::local_tempdir() expect_usethis_error( use_course("https://httpbin.org/get", destdir = path), "does not have MIME type" ) }) test_that("parse_content_disposition() parses Content-Description", { ## typical DropBox expect_identical( parse_content_disposition( "attachment; filename=\"foo.zip\"; filename*=UTF-8''foo.zip\"" ), c( "filename" = "\"foo.zip\"", "filename*" = "UTF-8''foo.zip\"" ) ) ## typical GitHub expect_identical( parse_content_disposition("attachment; filename=foo-main.zip"), c("filename" = "foo-main.zip") ) }) test_that("parse_content_disposition() errors on ill-formed `content-disposition` header", { expect_usethis_error( parse_content_disposition("aa;bb=cc;dd"), "doesn't start with" ) }) test_that("make_filename() gets name from 
`content-disposition` header", { ## DropBox expect_identical( make_filename( c( "filename" = "\"usethis-test.zip\"", "filename*" = "UTF-8''usethis-test.zip\"" ) ), "usethis-test.zip" ) ## GitHub expect_identical( make_filename(c("filename" = "buzzy-main.zip")), "buzzy-main.zip" ) }) test_that("make_filename() uses fallback if no `content-disposition` header", { expect_match(make_filename(NULL), "^file[0-9a-z]+$") }) test_that("keep_lgl() keeps and drops correct files", { keepers <- c("foo", ".gitignore", "a/.gitignore", "foo.Rproj", ".here") expect_true(all(keep_lgl(keepers))) droppers <- c( ".git", "/.git", "/.git/", ".git/", "foo/.git", ".git/config", ".git/objects/06/3d3gysle", ".Rproj.user", ".Rproj.user/123jkl/persistent-state", ".Rhistory", ".RData" ) expect_false(any(keep_lgl(droppers))) }) usethis/tests/testthat/test-write.R0000644000176200001440000001615014651000165017173 0ustar liggesusers# test that write_utf8() does not alter active project and # does not consult active project for line ending test_that("write_utf8(): no active project, write path outside project", { local_project(NULL) expect_false(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-nonproject") expect_false(possibly_in_proj(dir)) write_utf8(path(dir, "letters_LF"), letters[1:2], line_ending = "\n") expect_equal( readBin(path(dir, "letters_LF"), what = "raw", n = 3), charToRaw("a\nb") ) write_utf8(path(dir, "letters_CRLF"), letters[1:2], line_ending = "\r\n") expect_equal( readBin(path(dir, "letters_CRLF"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_false(proj_active()) }) test_that("write_utf8(): no active project, write to path inside a project", { local_project(NULL) expect_false(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-in-a-project") file_create(path(dir, ".here")) expect_true(possibly_in_proj(dir)) with_project(dir, use_rstudio(line_ending = "posix")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\nb") ) file_delete(path(dir, paste0(path_file(dir), ".Rproj"))) with_project(dir, use_rstudio(line_ending = "windows")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_false(proj_active()) }) test_that("write_utf8(): in an active project, write path outside project", { proj <- create_local_project(rstudio = TRUE) expect_true(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-nonproject") expect_false(possibly_in_proj(dir)) write_utf8(path(dir, "letters_LF"), letters[1:2], line_ending = "\n") expect_equal( readBin(path(dir, "letters_LF"), what = "raw", n = 3), charToRaw("a\nb") ) write_utf8(path(dir, "letters_CRLF"), letters[1:2], line_ending = "\r\n") expect_equal( readBin(path(dir, "letters_CRLF"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_equal(proj_get(), proj) }) test_that("write_utf8(): in an active project, write path in other project", { proj <- create_local_project(rstudio = TRUE) expect_true(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-in-a-project") file_create(path(dir, ".here")) expect_true(possibly_in_proj(dir)) with_project(dir, use_rstudio(line_ending = "posix")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\nb") ) file_delete(path(dir, paste0(path_file(dir), ".Rproj"))) with_project(dir, use_rstudio(line_ending = "windows")) write_utf8(path(dir, "letters"), 
letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_equal(proj_get(), proj) }) test_that("write_utf8() can append text when requested", { path <- file_temp() write_utf8(path, "x", line_ending = "\n") write_utf8(path, "x", line_ending = "\n", append = TRUE) expect_equal(readChar(path, 4), "x\nx\n") }) test_that("write_utf8() respects line ending", { path <- file_temp() write_utf8(path, "x", line_ending = "\n") expect_equal(detect_line_ending(path), "\n") write_utf8(path, "x", line_ending = "\r\n") expect_equal(detect_line_ending(path), "\r\n") }) # TODO: explore more edge cases re: active project on both sides test_that("write_utf8() can operate outside of a project", { dir <- withr::local_tempdir(pattern = "write-utf8-test") withr::local_dir(dir) local_project(NULL) expect_false(proj_active()) expect_no_error(write_utf8(path = "foo", letters[1:3])) }) # https://github.com/r-lib/usethis/issues/514 test_that("write_utf8() always produces a trailing newline", { path <- file_temp() write_utf8(path, "x", line_ending = "\n") expect_equal(readChar(path, 2), "x\n") }) test_that("write_union() writes a de novo file", { tmp <- file_temp() expect_false(file_exists(tmp)) write_union(tmp, letters[1:3], quiet = TRUE) expect_identical(read_utf8(tmp), letters[1:3]) }) test_that("write_union() leaves file 'as is'", { tmp <- file_temp() writeLines(letters[1:3], tmp) before <- read_utf8(tmp) write_union(tmp, "b", quiet = TRUE) expect_identical(before, read_utf8(tmp)) }) test_that("write_union() adds lines", { tmp <- file_temp() writeLines(letters[1:3], tmp) write_union(tmp, letters[4:5], quiet = TRUE) expect_setequal(read_utf8(tmp), letters[1:5]) }) # https://github.com/r-lib/usethis/issues/526 test_that("write_union() doesn't remove duplicated lines in the input", { tmp <- file_temp() before <- rep(letters[1:2], 3) add_me <- c("z", "a", "c", "a", "b") writeLines(before, tmp) expect_identical(before, read_utf8(tmp)) write_union(tmp, add_me, quiet = TRUE) expect_identical(read_utf8(tmp), c(before, c("z", "c"))) }) test_that("same_contents() detects if contents are / are not same", { tmp <- file_temp() x <- letters[1:3] writeLines(x, con = tmp, sep = "\n") expect_true(same_contents(tmp, x)) expect_false(same_contents(tmp, letters[4:6])) }) test_that("write_over() leaves file 'as is' (outside of a project)", { local_interactive(FALSE) tmp <- withr::local_file(file_temp()) writeLines(letters[1:3], tmp) before <- read_utf8(tmp) write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) # usethis.overwrite shouldn't matter for a file outside of a project withr::with_options( list(usethis.overwrite = TRUE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) } ) }) test_that("write_over() works in active project", { local_interactive(FALSE) create_local_project() tmp <- proj_path("foo.txt") writeLines(letters[1:3], tmp) before <- read_utf8(tmp) write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) use_git() withr::with_options( list(usethis.overwrite = TRUE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), letters[4:6]) } ) }) test_that("write_over() works for a file in a project that is not active", { local_interactive(FALSE) owd <- getwd() proj <- create_local_project() use_git() tmp <- proj_path("foo.txt") writeLines(letters[1:3], tmp) withr::local_dir(owd) local_project(NULL) expect_false(proj_active()) tmp <- path(proj, "foo.txt") 
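  # Capture the original contents before exercising usethis.overwrite = FALSE and then TRUE.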
before <- read_utf8(tmp) withr::with_options( list(usethis.overwrite = FALSE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) } ) withr::with_options( list(usethis.overwrite = TRUE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), letters[4:6]) } ) expect_false(proj_active()) }) test_that("write_union() messaging is correct with weird working directory", { create_local_project() use_directory("aaa/bbb") setwd("aaa/bbb") withr::local_options(usethis.quiet = FALSE) expect_snapshot( write_union(proj_path("somefile"), letters[4:6]) ) }) usethis/tests/testthat/test-proj-desc.R0000644000176200001440000000172614651000165017732 0ustar liggesuserstest_that("proj_desc_field_update() only messages when adding", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_snapshot({ proj_desc_field_update("Config/Needs/foofy", "alfa", append = TRUE) proj_desc_field_update("Config/Needs/foofy", "alfa", append = TRUE) proj_desc_field_update("Config/Needs/foofy", "bravo", append = TRUE) }) expect_equal(proj_desc()$get_list("Config/Needs/foofy"), c("alfa", "bravo")) }) test_that("proj_desc_field_update() works with multiple values", { create_local_package() # Add something to begin with proj_desc_field_update("Config/Needs/foofy", "alfa", append = TRUE) withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_snapshot({ proj_desc_field_update("Config/Needs/foofy", c("alfa", "bravo"), append = TRUE) }) expect_equal(proj_desc()$get_list("Config/Needs/foofy"), c("alfa", "bravo")) }) usethis/tests/testthat/test-git-default-branch.R0000644000176200001440000000742214651000165021503 0ustar liggesuserstest_that("git_default_branch() consults the default branch candidates, in order", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) create_local_project() use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) # singleton branch, with weird name git_default_branch_rename(from = git_branch(), to = "foofy") expect_equal(git_default_branch(), "foofy") # two weirdly named branches, but one matches init.defaultBranch (local) config gert::git_branch_create("blarg", checkout = TRUE, repo = repo) use_git_config("project", `init.defaultBranch` = "blarg") expect_equal(git_default_branch(), "blarg") # one of the Usual Suspects shows up gert::git_branch_create("master", checkout = TRUE, repo = repo) expect_equal(git_default_branch(), "master") # and another Usual Suspect shows up gert::git_branch_create("main", checkout = TRUE, repo = repo) expect_equal(git_default_branch(), "main") # finally, prefer something that matches what upstream says is default gert::git_branch_create("default-branch-of-upstream", checkout = TRUE, repo = repo) local_git_default_branch_remote() expect_equal(git_default_branch(), "default-branch-of-upstream") }) test_that("git_default_branch() errors if can't find obvious local default branch", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) create_local_project() use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) git_default_branch_rename(from = git_branch(), to = "foofy") gert::git_branch_create("blarg", checkout = TRUE, repo = repo) expect_error(git_default_branch(), class = "error_default_branch") }) test_that("git_default_branch() errors for local 
vs remote mismatch", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) create_local_project() use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) git_default_branch_rename(from = git_branch(), to = "foofy") local_git_default_branch_remote() expect_error(git_default_branch(), class = "error_default_branch") gert::git_branch_create("blarg", checkout = TRUE, repo = repo) local_git_default_branch_remote() expect_error(git_default_branch(), class = "error_default_branch") }) test_that("git_default_branch_rename() surfaces files that smell fishy", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) # for snapshot purposes, I don't want a random project name create_local_project(path(path_temp(), "abcde")) use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) # make sure we start with default branch = 'master' git_default_branch_rename(from = git_branch(), to = "master") expect_equal(git_default_branch(), "master") badge_lines <- c( "", "[![Codecov test coverage](https://codecov.io/gh/OWNER/REPO/branch/master/graph/badge.svg)](https://codecov.io/gh/OWNER/REPO?branch=master)", "" ) cli::cat_line(badge_lines, file = proj_path("README.md")) gha_lines <- c( "on:", " push:", " branches:", " - master" ) create_directory(".github/workflows") cli::cat_line(gha_lines, file = path(".github", "workflows", "blah.yml")) create_directory("whatever/foo") cli::cat_line( "edit: https://github.com/OWNER/REPO/edit/master/%s", file = path("whatever", "foo", "_bookdown.yaml") ) withr::local_options(usethis.quiet = FALSE) expect_snapshot( git_default_branch_rename() ) }) usethis/tests/testthat/test-rmarkdown.R0000644000176200001440000000176114651000165020047 0ustar liggesuserstest_that("use_rmarkdown_template() creates everything as promised, defaults", { create_local_package() use_rmarkdown_template() path <- path("inst", "rmarkdown", "templates", "template-name") yml <- read_utf8(proj_path(path, "template.yaml")) expect_true( all( c( "name: Template Name", "description: >", " A description of the template", "create_dir: FALSE" ) %in% yml ) ) expect_proj_file(path, "skeleton", "skeleton.Rmd") }) test_that("use_rmarkdown_template() creates everything as promised, args", { create_local_package() use_rmarkdown_template( template_name = "aaa", template_dir = "bbb", template_description = "ccc", template_create_dir = TRUE ) path <- path("inst", "rmarkdown", "templates", "bbb") yml <- read_utf8(proj_path(path, "template.yaml")) expect_true( all( c("name: aaa", "description: >", " ccc", "create_dir: TRUE") %in% yml ) ) expect_proj_file(path, "skeleton", "skeleton.Rmd") }) usethis/tests/testthat/test-ci.R0000644000176200001440000000207314651000165016433 0ustar liggesuserstest_that("use_circleci() configures CircleCI", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() local_target_repo_spec("OWNER/REPO") use_circleci(browse = FALSE) expect_true(is_build_ignored("^\\.circleci$")) expect_proj_dir(".circleci") expect_proj_file(".circleci/config.yml") yml <- yaml::yaml.load_file(proj_path(".circleci", "config.yml")) expect_identical( yml$jobs$build$steps[[7]]$store_artifacts$path, paste0(project_name(), ".Rcheck/") ) # use_circleci() properly formats keys for cache expect_identical( yml$jobs$build$steps[[1]]$restore_cache$keys, c("r-pkg-cache-{{ arch }}-{{ .Branch }}", "r-pkg-cache-{{ 
arch }}-") ) expect_identical( yml$jobs$build$steps[[8]]$save_cache$key, "r-pkg-cache-{{ arch }}-{{ .Branch }}" ) dir_delete(proj_path(".circleci")) docker <- "rocker/r-ver:3.5.3" use_circleci(browse = FALSE, image = docker) yml <- yaml::yaml.load_file(proj_path(".circleci", "config.yml")) expect_identical(yml$jobs$build$docker[[1]]$image, docker) }) usethis/tests/testthat/test-helpers.R0000644000176200001440000000742714717524721017526 0ustar liggesuserstest_that("valid_package_name() enforces valid package names", { # Contain only ASCII letters, numbers, and '.' # Have at least two characters # Start with a letter # Not end with '.' expect_true(valid_package_name("aa")) expect_true(valid_package_name("a7")) expect_true(valid_package_name("a.2")) expect_false(valid_package_name("a")) expect_false(valid_package_name("a-2")) expect_false(valid_package_name("2fa")) expect_false(valid_package_name(".fa")) expect_false(valid_package_name("aa\u00C0")) # \u00C0 is a-grave expect_false(valid_package_name("a3.")) }) test_that("valid_file_name() enforces valid file names", { # Contain only ASCII letters, numbers, '-', and '_' expect_true(valid_file_name("aa.R")) expect_true(valid_file_name("a7.R")) expect_true(valid_file_name("a-2.R")) expect_true(valid_file_name("a_2.R")) expect_false(valid_file_name("aa\u00C0.R")) # \u00C0 is a-grave expect_false(valid_file_name("a?3.R")) }) # use_dependency ---------------------------------------------------------- test_that("we message for new type and are silent for same type", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_dependency("crayon", "Imports") ) expect_silent(use_dependency("crayon", "Imports")) }) test_that("we message for version change and are silent for same version", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_dependency("crayon", "Imports") ) expect_snapshot( use_dependency("crayon", "Imports", min_version = "1.0.0") ) expect_silent(use_dependency("crayon", "Imports", min_version = "1.0.0")) expect_snapshot( use_dependency("crayon", "Imports", min_version = "2.0.0") ) expect_snapshot( use_dependency("crayon", "Imports", min_version = "1.0.0") ) }) ## https://github.com/r-lib/usethis/issues/99 test_that("use_dependency() upgrades a dependency", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_dependency("usethis", "Suggests")) expect_match(desc::desc_get("Suggests"), "usethis") expect_snapshot(use_dependency("usethis", "Imports")) expect_match(desc::desc_get("Imports"), "usethis") expect_no_match(desc::desc_get("Suggests"), "usethis") }) ## https://github.com/r-lib/usethis/issues/99 test_that("use_dependency() declines to downgrade a dependency", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_dependency("usethis", "Imports")) expect_match(desc::desc_get("Imports"), "usethis") expect_snapshot(use_dependency("usethis", "Suggests")) expect_match(desc::desc_get("Imports"), "usethis") expect_no_match(desc::desc_get("Suggests"), "usethis") }) test_that("can add LinkingTo dependency if other dependency already exists", { create_local_package() use_dependency("rlang", "Imports") withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_dependency("rlang", "LinkingTo") ) deps <- proj_deps() expect_setequal(deps$type, c("Imports", "LinkingTo")) expect_setequal(deps$package, "rlang") }) test_that("use_dependency() does not fall over on 2nd LinkingTo request", { 
create_local_package() local_interactive(FALSE) use_dependency("rlang", "LinkingTo") withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_dependency("rlang", "LinkingTo")) }) # https://github.com/r-lib/usethis/issues/1649 test_that("use_dependency() can level up a LinkingTo dependency", { create_local_package() use_dependency("rlang", "LinkingTo") use_dependency("rlang", "Suggests") withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_package("rlang")) deps <- proj_deps() expect_setequal(deps$type, c("Imports", "LinkingTo")) expect_setequal(deps$package, "rlang") }) usethis/tests/testthat/test-data-table.R0000644000176200001440000000162014651000165020033 0ustar liggesuserstest_that("use_data_table() requires a package", { create_local_project() expect_usethis_error(use_data_table(), "not an R package") }) test_that("use_data_table() Imports data.table", { create_local_package() use_package_doc() local_check_installed() local_roxygen_update_ns() local_check_fun_exists() use_data_table() expect_match(proj_desc()$get("Imports"), "data.table") expect_snapshot(roxygen_ns_show()) }) test_that("use_data_table() blocks use of Depends", { local_interactive(FALSE) create_local_package() use_package_doc() desc::desc_set("Depends", "data.table") local_check_installed() local_roxygen_update_ns() local_check_fun_exists() withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot( use_data_table(), transform = scrub_testpkg ) expect_match(desc::desc_get("Imports"), "data.table") expect_snapshot(roxygen_ns_show()) }) usethis/tests/testthat/helper.R0000644000176200001440000001042114712743552016352 0ustar liggesusers## If session temp directory appears to be, or be within, a project, there ## will be large scale, spurious test failures. ## The IDE sometimes leaves .Rproj files behind in session temp directory or ## one of its parents. ## Delete such files manually. session_temp_proj <- proj_find(path_temp()) if (!is.null(session_temp_proj)) { Rproj_files <- fs::dir_ls(session_temp_proj, glob = "*.Rproj") ui_bullets(c( "x" = "Rproj {cli::qty(length(Rproj_files))} file{?s} found at or above session temp dir:", bulletize(usethis_map_cli(Rproj_files)), "!" = "Expect this to cause spurious test failures." )) } create_local_package <- function(dir = file_temp(pattern = "testpkg"), env = parent.frame(), rstudio = FALSE) { create_local_thing(dir, env, rstudio, "package") } create_local_project <- function(dir = file_temp(pattern = "testproj"), env = parent.frame(), rstudio = FALSE) { create_local_thing(dir, env, rstudio, "project") } create_local_thing <- function(dir = file_temp(pattern = pattern), env = parent.frame(), rstudio = FALSE, thing = c("package", "project")) { thing <- match.arg(thing) if (fs::dir_exists(dir)) { ui_abort("Target {.arg dir} {.path {pth(dir)}} already exists.") } old_project <- proj_get_() # this could be `NULL`, i.e. no active project old_wd <- getwd() # not necessarily same as `old_project` withr::defer( { ui_bullets(c("Deleting temporary project: {.path {dir}}")) fs::dir_delete(dir) }, envir = env ) ui_silence( switch( thing, package = create_package( dir, # This is for the sake of interactive development of snapshot tests. # When the active usethis project is a package created with this # function, testthat learns its edition from *that* package, not from # usethis. So, by default, opt in to testthat 3e in these ephemeral test # packages. 
fields = list("Config/testthat/edition" = "3"), rstudio = rstudio, open = FALSE, check_name = FALSE ), project = create_project(dir, rstudio = rstudio, open = FALSE) ) ) withr::defer(proj_set(old_project, force = TRUE), envir = env) proj_set(dir) withr::defer( { ui_bullets(c("Restoring original working directory: {.path {old_wd}}")) setwd(old_wd) }, envir = env ) setwd(proj_get()) invisible(proj_get()) } scrub_testpkg <- function(message) { gsub("testpkg[a-zA-Z0-9]+", "{TESTPKG}", message, perl = TRUE) } scrub_testproj <- function(message) { gsub("testproj[a-zA-Z0-9]+", "{TESTPROJ}", message, perl = TRUE) } skip_if_not_ci <- function() { ci_providers <- c("GITHUB_ACTIONS", "TRAVIS", "APPVEYOR") ci <- any(toupper(Sys.getenv(ci_providers)) == "TRUE") if (ci) { return(invisible(TRUE)) } skip("Not on GitHub Actions, Travis, or Appveyor") } skip_if_no_git_user <- function() { user_name <- git_cfg_get("user.name") user_email <- git_cfg_get("user.email") user_name_exists <- !is.null(user_name) user_email_exists <- !is.null(user_email) if (user_name_exists && user_email_exists) { return(invisible(TRUE)) } skip("No Git user configured") } # CRAN's mac builder sets $HOME to a read-only ram disk, so tests can fail if # you even tickle something that might try to lock its own config file during # the operation (e.g. git) or if you simply test for writeability skip_on_cran_macos <- function() { sysname <- tolower(Sys.info()[["sysname"]]) on_cran <- !identical(Sys.getenv("NOT_CRAN"), "true") if (on_cran && sysname == "darwin") { skip("On CRAN and on macOS") } invisible(TRUE) } expect_usethis_error <- function(...) { expect_error(..., class = "usethis_error") } is_build_ignored <- function(pattern, ..., base_path = proj_get()) { lines <- read_utf8(path(base_path, ".Rbuildignore")) length(grep(pattern, x = lines, fixed = TRUE, ...)) > 0 } test_file <- function(fname) testthat::test_path("ref", fname) expect_proj_file <- function(...) expect_true(file_exists(proj_path(...))) expect_proj_dir <- function(...) 
expect_true(dir_exists(proj_path(...))) usethis/tests/testthat/test-tutorial.R0000644000176200001440000000111314651000165017675 0ustar liggesuserstest_that("use_tutorial() checks its inputs", { skip_if_not_installed("rmarkdown") create_local_package() expect_snapshot(use_tutorial(), error = TRUE) expect_snapshot(use_tutorial(name = "tutorial-file"), error = TRUE) }) test_that("use_tutorial() creates a tutorial", { skip_if_not_installed("rmarkdown") create_local_package() local_check_installed() use_tutorial(name = "aaa", title = "bbb") tute_file <- path("inst", "tutorials", "aaa", "aaa", ext = "Rmd") expect_proj_file(tute_file) expect_equal(rmarkdown::yaml_front_matter(tute_file)$title, "bbb") }) usethis/tests/testthat/test-utils-github.R0000644000176200001440000001663714717524721020507 0ustar liggesuserstest_that("parse_github_remotes() works, on named list or named character", { urls <- list( https = "https://github.com/OWNER/REPO.git", ghe = "https://github.acme.com/OWNER/REPO.git", browser = "https://github.com/OWNER/REPO", ssh1 = "git@github.com:OWNER/REPO.git", ssh2 = "ssh://git@github.com/OWNER/REPO.git", gitlab1 = "https://gitlab.com/OWNER/REPO.git", gitlab2 = "git@gitlab.com:OWNER/REPO.git", bitbucket1 = "https://bitbucket.org/OWNER/REPO.git", bitbucket2 = "git@bitbucket.org:OWNER/REPO.git" ) parsed <- parse_github_remotes(urls) expect_equal(parsed$name, names(urls)) expect_equal(unique(parsed$repo_owner), "OWNER") expect_equal( parsed$host, c("github.com", "github.acme.com", "github.com", "github.com", "github.com", "gitlab.com", "gitlab.com", "bitbucket.org", "bitbucket.org") ) expect_equal(unique(parsed$repo_name), "REPO") expect_equal( parsed$protocol, c("https", "https", "https", "ssh", "ssh", "https", "ssh", "https", "ssh") ) parsed2 <- parse_github_remotes(unlist(urls)) expect_equal(parsed, parsed2) }) test_that("parse_github_remotes() works on edge cases", { parsed <- parse_github_remotes("https://github.com/HenrikBengtsson/R.rsp") expect_equal(parsed$repo_owner, "HenrikBengtsson") expect_equal(parsed$repo_name, "R.rsp") }) test_that("parse_github_remotes() works for length zero input", { expect_no_error( parsed <- parse_github_remotes(character()) ) expect_equal(nrow(parsed), 0) expect_setequal( names(parsed), c("name", "url", "host", "repo_owner", "repo_name", "protocol") ) }) test_that("parse_repo_url() passes a naked repo spec through", { out <- parse_repo_url("OWNER/REPO") expect_equal( out, list(repo_spec = "OWNER/REPO", host = NULL) ) }) test_that("parse_repo_url() handles GitHub remote URLs", { urls <- list( https = "https://github.com/OWNER/REPO.git", ghe = "https://github.acme.com/OWNER/REPO.git", browser = "https://github.com/OWNER/REPO", ssh = "git@github.com:OWNER/REPO.git" ) out <- map(urls, parse_repo_url) expect_match(map_chr(out, "repo_spec"), "OWNER/REPO", fixed = TRUE) out_host <- map_chr(out, "host") expect_match( out_host[c("https", "browser", "ssh")], "https://github.com", fixed = TRUE ) expect_equal(out_host[["ghe"]], "https://github.acme.com") }) test_that("parse_repo_url() errors for non-GitHub remote URLs", { urls <- list( gitlab1 = "https://gitlab.com/OWNER/REPO.git", gitlab2 = "git@gitlab.com:OWNER/REPO.git", bitbucket1 = "https://bitbucket.org/OWNER/REPO.git", bitbucket2 = "git@bitbucket.org:OWNER/REPO.git" ) safely_parse_repo_url <- purrr::safely(parse_repo_url) out <- map(urls, safely_parse_repo_url) out_result <- map(out, "result") expect_true(all(map_lgl(out_result, is.null))) }) test_that("github_remote_list() works", { 
local_interactive(FALSE) create_local_project() use_git() use_git_remote("origin", "https://github.com/OWNER/REPO.git") use_git_remote("upstream", "https://github.com/THEM/REPO.git") use_git_remote("foofy", "https://github.com/OTHERS/REPO.git") use_git_remote("gitlab", "https://gitlab.com/OTHERS/REPO.git") use_git_remote("bitbucket", "git@bitbucket.org:OWNER/REPO.git") grl <- github_remote_list() expect_setequal(grl$remote, c("origin", "upstream")) expect_setequal(grl$repo_spec, c("OWNER/REPO", "THEM/REPO")) grl <- github_remote_list(c("upstream", "foofy")) expect_setequal(grl$remote, c("upstream", "foofy")) nms <- names(grl) grl <- github_remote_list(c("gitlab", "bitbucket")) expect_equal(nrow(grl), 0) expect_named(grl, nms) }) test_that("github_remotes(), github_remote_list() accept explicit 0-row input", { x <- data.frame(name = character(), url = character(), stringsAsFactors = FALSE) grl <- github_remote_list(x = x) expect_equal(nrow(grl), 0) expect_true(all(map_lgl(grl, is.character))) gr <- github_remotes(x = x) expect_equal(nrow(grl), 0) }) test_that("github_remotes() works", { skip_if_offline("github.com") skip_if_no_git_user() create_local_project() use_git() # no git remotes = 0-row edge case expect_no_error( grl <- github_remotes() ) # a public remote = no token necessary to get github info use_git_remote("origin", "https://github.com/r-lib/usethis.git") expect_no_error( grl <- github_remotes() ) expect_false(grl$is_fork) expect_true(is.na(grl$parent_repo_owner)) # no git remote by this name = 0-row edge case expect_no_error( grl <- github_remotes("foofy") ) # gh::gh() call should fail, so we should get no info from github use_git_remote("origin", "https://github.com/r-lib/DOESNOTEXIST.git", overwrite = TRUE) expect_no_error( grl <- github_remotes() ) expect_true(is.na(grl$is_fork)) }) test_that("github_url_from_git_remotes() is idempotent", { url <- "https://github.com/r-lib/usethis.git" out <- github_url_from_git_remotes(url) expect_equal(out, github_url_from_git_remotes(out)) }) # GitHub remote configuration -------------------------------------------------- test_that("we understand the list of all possible configs", { expect_snapshot(all_configs()) }) test_that("'no_github' is reported correctly", { expect_snapshot(new_no_github()) }) test_that("'ours' is reported correctly", { expect_snapshot(new_ours()) }) test_that("'theirs' is reported correctly", { expect_snapshot(new_theirs()) }) test_that("'fork' is reported correctly", { expect_snapshot(new_fork()) }) test_that("'maybe_ours_or_theirs' is reported correctly", { expect_snapshot(new_maybe_ours_or_theirs()) }) test_that("'maybe_fork' is reported correctly", { expect_snapshot(new_maybe_fork()) }) test_that("'fork_cannot_push_origin' is reported correctly", { expect_snapshot(new_fork_cannot_push_origin()) }) test_that("'fork_upstream_is_not_origin_parent' is reported correctly", { expect_snapshot(new_fork_upstream_is_not_origin_parent()) }) test_that("'upstream_but_origin_is_not_fork' is reported correctly", { expect_snapshot(new_upstream_but_origin_is_not_fork()) }) test_that("'fork_upstream_is_not_origin_parent' is detected correctly", { # inspired by something that actually happened: # 1. r-pkgs/gh is created # 2. user forks and clones: origin = USER/gh, upstream = r-pkgs/gh # 3. parent repo becomes r-lib/gh, due to transfer or ownership or owner # name change # Now upstream looks like it does not point to fork parent. 
local_interactive(FALSE) create_local_project() use_git() use_git_remote("origin", "https://github.com/jennybc/gh.git") use_git_remote("upstream", "https://github.com/r-pkgs/gh.git") gr <- github_remotes(github_get = FALSE) gr$github_got <- TRUE gr$is_fork <- c(TRUE, FALSE) gr$can_push <- TRUE gr$perm_known <- TRUE gr$parent_repo_owner <- c("r-lib", NA) gr$parent_repo_name <- c("gh", NA) gr$parent_repo_spec <- c("r-lib/gh", NA) local_mocked_bindings(github_remotes = function(...) gr) cfg <- github_remote_config() expect_equal(cfg$type, "fork_upstream_is_not_origin_parent") expect_snapshot(error = TRUE, stop_bad_github_remote_config(cfg)) }) test_that("bad github config error", { expect_snapshot( error = TRUE, stop_bad_github_remote_config(new_fork_upstream_is_not_origin_parent()) ) }) test_that("maybe bad github config error", { expect_snapshot( error = TRUE, stop_maybe_github_remote_config(new_maybe_fork()) ) }) usethis/tests/testthat/test-github-actions.R0000644000176200001440000000777314717524721021010 0ustar liggesuserstest_that("use_github_action() allows for custom urls", { skip_if_no_git_user() skip_if_offline("github.com") local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_readme_md() withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_github_action( url = "https://raw.githubusercontent.com/r-lib/actions/v2/examples/check-full.yaml", readme = "https://github.com/r-lib/actions/blob/v2/examples/README.md" ) ) expect_proj_dir(".github") expect_proj_dir(".github/workflows") expect_proj_file(".github/workflows/R-CMD-check.yaml") }) test_that("use_github_action() still errors in non-interactive environment", { expect_snapshot(use_github_action(), error = TRUE) }) test_that("use_github_action() appends yaml in name if missing", { skip_if_no_git_user() skip_if_offline("github.com") local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_github_action("check-full") expect_proj_dir(".github") expect_proj_dir(".github/workflows") expect_proj_file(".github/workflows/R-CMD-check.yaml") }) test_that("use_github_action() accepts a ref", { skip_if_no_git_user() skip_if_offline("github.com") local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_github_action("check-full", ref = "v1") expect_snapshot( read_utf8(proj_path(".github/workflows/R-CMD-check.yaml"), n = 1) ) }) test_that("uses_github_action() reports usage of GitHub Actions", { skip_if_no_git_user() skip_if_offline("github.com") local_interactive(FALSE) create_local_package() expect_false(uses_github_actions()) use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") local_mocked_bindings( use_github_actions_badge = function(name, repo_spec) NULL ) use_github_action("check-standard") expect_true(uses_github_actions()) }) test_that("check_uses_github_actions() can throw error", { create_local_package() withr::local_options(list(crayon.enabled = FALSE, cli.width = Inf)) expect_snapshot( check_uses_github_actions(), error = TRUE, transform = scrub_testpkg ) }) test_that("use_github_action() accepts a name", { skip_if_no_git_user() skip_if_offline("github.com") local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_readme_md() use_github_action("check-release") expect_proj_dir(".github") 
expect_proj_dir(".github/workflows") expect_proj_file(".github/workflows/R-CMD-check.yaml") readme_lines <- read_utf8(proj_path("README.md")) expect_match(readme_lines, "R-CMD-check", all = FALSE) # .github has been Rbuildignored expect_true(is_build_ignored("^\\.github$")) }) test_that("use_tidy_github_actions() configures the full check and pr commands", { skip_if_no_git_user() skip_if_offline("github.com") local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore", repo = git_repo()) gert::git_commit("a commit, so we are not on an unborn branch", repo = git_repo()) use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_readme_md() use_tidy_github_actions() expect_proj_file(".github/workflows/R-CMD-check.yaml") yml <- yaml::yaml.load_file(proj_path(".github/workflows/R-CMD-check.yaml")) size_build_matrix <- length(yml[["jobs"]][["R-CMD-check"]][["strategy"]][["matrix"]][["config"]]) expect_gte(size_build_matrix, 6) # release, r-devel, 4 previous versions expect_proj_file(".github/workflows/pkgdown.yaml") expect_proj_file(".github/workflows/test-coverage.yaml") expect_proj_file(".github/workflows/pr-commands.yaml") readme_lines <- read_utf8(proj_path("README.md")) expect_match(readme_lines, "R-CMD-check", all = FALSE) expect_match(readme_lines, "test coverage", all = FALSE) }) usethis/tests/testthat/test-latest-dependencies.R0000644000176200001440000000211714651000165021757 0ustar liggesuserstest_that("sets version for imports & depends dependencies", { skip_if_offline() withr::local_options(list(repos = c(CRAN = "https://cloud.r-project.org"))) create_local_package() use_package("usethis") use_package("desc", "Depends") use_latest_dependencies() deps <- proj_deps() expect_equal( deps$version[deps$package %in% c("usethis", "desc")] == "*", c(FALSE, FALSE) ) }) test_that("doesn't affect suggests", { skip_if_offline() withr::local_options(list(repos = c(CRAN = "https://cloud.r-project.org"))) create_local_package() use_package("cli", "Suggests") use_latest_dependencies() deps <- proj_deps() expect_equal(deps$version[deps$package == "cli"], "*") }) test_that("does nothing for a base package", { skip_if_offline() withr::local_options(list(repos = c(CRAN = "https://cloud.r-project.org"))) create_local_package() use_package("tools") # if usethis ever depends on a recommended package, we could test that here too use_latest_dependencies() deps <- proj_deps() expect_equal(deps$version[deps$package == "tools"], "*") }) usethis/tests/testthat/test-r.R0000644000176200001440000000552614717524721016323 0ustar liggesuserstest_that("use_r() creates a .R file below R/", { create_local_package() use_r("foo") expect_proj_file("R/foo.R") }) test_that("use_test() creates a test file", { create_local_package() use_test("foo", open = FALSE) expect_proj_file("tests", "testthat", "test-foo.R") }) test_that("use_test_helper() creates a helper file", { create_local_package() expect_snapshot( error = TRUE, use_test_helper(open = FALSE) ) use_testthat() use_test_helper(open = FALSE) withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot( use_test_helper("foo", open = FALSE) ) expect_proj_file("tests", "testthat", "helper.R") expect_proj_file("tests", "testthat", "helper-foo.R") }) test_that("can use use_test() in a project", { create_local_project() expect_no_error(use_test("foofy")) }) # helpers ----------------------------------------------------------------- test_that("compute_name() errors if no RStudio", { local_rstudio_available(FALSE) 
expect_snapshot(compute_name(), error = TRUE) }) test_that("compute_name() sets extension if missing", { expect_equal(compute_name("foo"), "foo.R") }) test_that("compute_name() validates its inputs", { expect_snapshot(error = TRUE, { compute_name("foo.c") compute_name("R/foo.c") compute_name(c("a", "b")) compute_name("") compute_name("****") }) }) test_that("compute_active_name() errors if no files open", { expect_snapshot(compute_active_name(NULL), error = TRUE) }) test_that("compute_active_name() checks directory", { expect_snapshot(compute_active_name("foo/bar.R"), error = TRUE) }) test_that("compute_active_name() standardises name", { dir <- create_local_project() expect_equal( compute_active_name(path(dir, "R/bar.R"), "c"), "bar.c" ) expect_equal( compute_active_name(path(dir, "src/bar.cpp"), "R"), "bar.R" ) expect_equal( compute_active_name(path(dir, "tests/testthat/test-bar.R"), "R"), "bar.R" ) expect_equal( compute_active_name(path(dir, "tests/testthat/_snaps/bar.md"), "R"), "bar.R" ) # https://github.com/r-lib/usethis/issues/1690 expect_equal( compute_active_name(path(dir, "R/data.frame.R"), "R"), "data.frame.R" ) }) # https://github.com/r-lib/usethis/issues/1863 test_that("compute_name() accepts the declared extension", { expect_equal(compute_name("foo.cpp", ext = "cpp"), "foo.cpp") }) test_that("as_test_helper_file() works", { expect_equal(as_test_helper_file(), "helper.R") expect_equal(as_test_helper_file("helper"), "helper.R") expect_equal(as_test_helper_file("helper.R"), "helper.R") expect_equal(as_test_helper_file("stuff"), "helper-stuff.R") expect_equal(as_test_helper_file("helper-stuff"), "helper-stuff.R") expect_equal(as_test_helper_file("stuff.R"), "helper-stuff.R") expect_equal(as_test_helper_file("helper-stuff.R"), "helper-stuff.R") }) usethis/tests/testthat/test-license.R0000644000176200001440000000274514651000165017470 0ustar liggesuserstest_that("use_mit_license() works", { create_local_package() use_mit_license() expect_equal(desc::desc_get_field("License"), "MIT + file LICENSE") expect_proj_file("LICENSE.md") expect_true(is_build_ignored("^LICENSE\\.md$")) expect_proj_file("LICENSE") expect_false(is_build_ignored("^LICENSE$")) }) test_that("use_proprietary_license() works", { create_local_package() use_proprietary_license("foo") expect_equal(desc::desc_get_field("License"), "file LICENSE") expect_proj_file("LICENSE") # TODO add snapshot test }) test_that("other licenses work without error", { create_local_package() expect_error(use_agpl_license(3), NA) expect_error(use_apache_license(2), NA) expect_error(use_cc0_license(), NA) expect_error(use_ccby_license(), NA) expect_error(use_gpl_license(2), NA) expect_error(use_gpl_license(3), NA) expect_error(use_lgpl_license(2.1), NA) expect_error(use_lgpl_license(3), NA) # old fallbacks expect_error(use_agpl3_license(), NA) expect_error(use_gpl3_license(), NA) expect_error(use_apl2_license(), NA) }) test_that("check license gives useful errors", { expect_error(check_license_version(1, 2), "must be 2") expect_error(check_license_version(1, 2:4), "must be 2, 3, or 4") }) test_that("generate correct abbreviations", { expect_equal(license_abbr("GPL", 2, TRUE), "GPL (>= 2)") expect_equal(license_abbr("GPL", 2, FALSE), "GPL-2") expect_equal(license_abbr("Apache License", 2, FALSE), "Apache License (== 2)") }) usethis/tests/testthat/test-citation.R0000644000176200001440000000021014651000165017641 0ustar liggesuserstest_that("use_citation() creates promised file", { create_local_package() use_citation() expect_proj_file("inst", 
"CITATION") }) usethis/tests/testthat/test-package.R0000644000176200001440000000517114651000165017435 0ustar liggesuserstest_that("use_package() won't facilitate dependency on tidyverse/tidymodels", { create_local_package() expect_usethis_error(use_package("tidyverse"), "rarely a good idea") expect_usethis_error(use_package("tidymodels"), "rarely a good idea") }) test_that("use_package() guides new packages but not pre-existing ones", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot({ use_package("withr") use_package("withr") use_package("withr", "Suggests") }) }) test_that("use_package() handles R versions with aplomb", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_package("R"), error = TRUE) expect_snapshot(use_package("R", type = "Depends"), error = TRUE) expect_snapshot(use_package("R", type = "Depends", min_version = "3.6")) expect_equal(subset(proj_deps(), package == "R")$version, ">= 3.6") local_mocked_bindings(r_version = function() "4.1") expect_snapshot(use_package("R", type = "Depends", min_version = TRUE)) expect_equal(subset(proj_deps(), package == "R")$version, ">= 4.1") }) test_that("use_package(type = 'Suggests') guidance w/o and w/ rlang", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_package("withr", "Suggests")) ui_silence(use_package("rlang")) expect_snapshot(use_package("purrr", "Suggests")) }) # use_dev_package() ----------------------------------------------------------- test_that("use_dev_package() writes a remote", { create_local_package() local_ui_yep() use_dev_package("usethis") expect_equal(proj_desc()$get_remotes(), "r-lib/usethis") }) test_that("use_dev_package() can override over default remote", { create_local_package() use_dev_package("usethis", remote = "github::r-lib/usethis") expect_equal(proj_desc()$get_remotes(), "github::r-lib/usethis") }) test_that("package_remote() works for an installed package with github URL", { d <- desc::desc(text = c( "Package: test", "URL: https://github.com/OWNER/test" )) local_ui_yep() expect_equal(package_remote(d), "OWNER/test") }) test_that("package_remote() works for package installed from github or gitlab", { d <- desc::desc(text = c( "Package: test", "RemoteUsername: OWNER", "RemoteRepo: test" )) d$set(RemoteType = "github") expect_equal(package_remote(d), "OWNER/test") d$set(RemoteType = "gitlab") expect_equal(package_remote(d), "gitlab::OWNER/test") }) test_that("package_remote() errors if no remote and no github URL", { d <- desc::desc(text = c("Package: test")) expect_usethis_error(package_remote(d), "Cannot determine remote") }) usethis/tests/testthat/test-coverage.R0000644000176200001440000000026414651514262017643 0ustar liggesuserstest_that("we use specific URLs in a codecov badge", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_codecov_badge("OWNER/REPO")) }) usethis/tests/testthat/test-block.R0000644000176200001440000000101214651000165017122 0ustar liggesuserstest_that("block_append() only writes unique lines", { path <- withr::local_tempfile() writeLines(block_create(), path) block_append("---", c("x", "y"), path) block_append("---", c("y", "x"), path) expect_equal(block_show(path), c("x", "y")) }) test_that("block_append() can sort, if requested", { path <- withr::local_tempfile() writeLines(block_create(), path) block_append("---", c("z", "y"), path) block_append("---", "x", path, sort = TRUE) expect_equal(block_show(path), c("x", "y", "z")) }) 
usethis/tests/testthat/test-line-ending.R0000644000176200001440000000224414651000165020231 0ustar liggesuserstest_that("can detect path from RStudio project file", { create_local_package() use_rstudio("posix") expect_equal(proj_line_ending(), "\n") file_delete(proj_path(paste(paste0(project_name(), ".Rproj")))) use_rstudio("windows") expect_equal(proj_line_ending(), "\r\n") }) test_that("can detect path from DESCRIPTION or .R file", { create_local_project() write_utf8(proj_path("DESCRIPTION"), c("x", "y", "z"), line_ending = "\r\n") expect_equal(proj_line_ending(), "\r\n") file_delete(proj_path("DESCRIPTION")) dir_create(proj_path("R")) write_utf8(proj_path("R/test.R"), c("x", "y", "z"), line_ending = "\r\n") expect_equal(proj_line_ending(), "\r\n") }) test_that("falls back to platform specific encoding", { create_local_project() expect_equal(proj_line_ending(), platform_line_ending()) }) test_that("correctly detect line encoding", { path <- file_temp() con <- file(path, open = "wb") writeLines(c("a", "b", "c"), con, sep = "\n") close(con) expect_equal(detect_line_ending(path), "\n") con <- file(path, open = "wb") writeLines(c("a", "b", "c"), con, sep = "\r\n") close(con) expect_equal(detect_line_ending(path), "\r\n") }) usethis/tests/testthat/test-github.R0000644000176200001440000000270114717524721017334 0ustar liggesuserstest_that("has_github_links() uses the target_repo, if provided", { skip_if_no_git_user() create_local_package() local_interactive(FALSE) use_git() desc::desc_set_urls("https://github.com/OWNER/REPO") desc::desc_set("BugReports", "https://github.com/OWNER/REPO/issues") tr <- list(url = "git@github.com:OWNER/REPO.git") expect_true(has_github_links(tr)) }) test_that("use_github_links populates empty URL field", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() local_mocked_bindings( github_url_from_git_remotes = function() "https://github.com/OWNER/REPO" ) # when no URL field use_github_links() expect_equal(proj_desc()$get_urls(), "https://github.com/OWNER/REPO") expect_equal( proj_desc()$get_field("BugReports"), "https://github.com/OWNER/REPO/issues" ) }) test_that("use_github_links() aborts or appends URLs when it should", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() local_mocked_bindings( github_url_from_git_remotes = function() "https://github.com/OWNER/REPO" ) d <- proj_desc() d$set_urls(c("https://existing.url", "https://existing.url1")) d$write() expect_snapshot(use_github_links(overwrite = FALSE), error = TRUE) use_github_links(overwrite = TRUE) expect_equal( proj_desc()$get_urls(), c("https://existing.url", "https://existing.url1", "https://github.com/OWNER/REPO") ) }) usethis/tests/testthat/test-use_import_from.R0000644000176200001440000000143114717524762021267 0ustar liggesuserstest_that("use_import_from() imports the related package & adds line to package doc", { create_local_package() use_package_doc() use_import_from("lifecycle", "deprecated") expect_equal(proj_desc()$get_field("Imports"), "lifecycle") expect_equal(roxygen_ns_show(), "#' @importFrom lifecycle deprecated") }) test_that("use_import_from() adds one line for each function", { create_local_package() use_package_doc() use_import_from("lifecycle", c("deprecate_warn", "deprecate_stop")) expect_snapshot(roxygen_ns_show()) }) test_that("use_import_from() generates helpful errors", { create_local_package() use_package_doc() expect_snapshot(error = TRUE, { use_import_from(1) use_import_from(c("desc", "rlang")) use_import_from("desc", 
"pool_noodle") }) }) usethis/tests/testthat/test-revdep.R0000644000176200001440000000050214651000165017320 0ustar liggesuserstest_that("use_revdep() requires a package", { create_local_project() expect_usethis_error(use_revdep(), "not an R package") }) test_that("use_revdep() creates and ignores files/dirs", { create_local_package() use_revdep() expect_proj_file("revdep", ".gitignore") expect_true(is_build_ignored("^revdep$")) }) usethis/tests/testthat/ref/0000755000176200001440000000000014717524721015526 5ustar liggesusersusethis/tests/testthat/ref/foo-implicit-parent.zip0000644000176200001440000000032414717524721022133 0ustar liggesusersPK húXíý&& foo/file.txtUT u¤fx¤fux öI am file.txt which lives inside foo/ PK húXíý&& ¤foo/file.txtUTu¤fux öPKRlusethis/tests/testthat/ref/yo-explicit-parent.zip0000644000176200001440000000155414717524721022014 0ustar liggesusersPK húXyo/UT u¤fu¤fux öPK húX yo/subdir2/UT u¤fu¤fux öPK húX¶ØÌd**yo/subdir2/file2.txtUT u¤fx¤fux öI am file2.txt, located below yo/subdir2/ PK húX yo/subdir1/UT u¤fu¤fux öPK húXú×=**yo/subdir1/file1.txtUT u¤fx¤fux öI am file1.txt, located below yo/subdir1/ PK húXíAyo/UTu¤fux öPK húX íA=yo/subdir2/UTu¤fux öPK húX¶ØÌd**¤‚yo/subdir2/file2.txtUTu¤fux öPK húX íAúyo/subdir1/UTu¤fux öPK húXú×=**¤?yo/subdir1/file1.txtUTu¤fux öPKŸ·usethis/tests/testthat/ref/foo-loose-dropbox.zip0000644000176200001440000000041214651000165021610 0ustar liggesusersPK!/PKPKr™+Lfile.txtóTHÌUHËÌIÕ+©(Q(ÏÈLÎPÈÉ,K-VÈÌ+ÎLIUHËÏ×çPKíý(&PK!ä/PKr™+Líý(& ä1file.txtPKeusethis/tests/testthat/ref/foo-no-parent.zip0000644000176200001440000000031414717524721020734 0ustar liggesusersPK húXíý&&file.txtUT u¤fx¤fux öI am file.txt which lives inside foo/ PK húXíý&&¤file.txtUTu¤fux öPKNhusethis/tests/testthat/ref/yo-implicit-parent.zip0000644000176200001440000000134614717524721022004 0ustar liggesusersPK húX yo/subdir1/UT u¤fu¤fux öPK húXú×=**yo/subdir1/file1.txtUT u¤fx¤fux öI am file1.txt, located below yo/subdir1/ PK húX yo/subdir2/UT u¤fu¤fux öPK húX¶ØÌd**yo/subdir2/file2.txtUT u¤fx¤fux öI am file2.txt, located below yo/subdir2/ PK húX íAyo/subdir1/UTu¤fux öPK húXú×=**¤Eyo/subdir1/file1.txtUTu¤fux öPK húX íA½yo/subdir2/UTu¤fux öPK húX¶ØÌd**¤yo/subdir2/file2.txtUTu¤fux öPKVzusethis/tests/testthat/ref/foo-explicit-parent.zip0000644000176200001440000000053414717524721022145 0ustar liggesusersPK húXfoo/UT u¤fu¤fux öPK húXíý&& foo/file.txtUT u¤fx¤fux öI am file.txt which lives inside foo/ PK húXíAfoo/UTu¤fux öPK húXíý&& ¤>foo/file.txtUTu¤fux öPKœªusethis/tests/testthat/ref/yo/0000755000176200001440000000000014721320322016137 5ustar liggesusersusethis/tests/testthat/ref/yo/subdir2/0000755000176200001440000000000014651000165017513 5ustar liggesusersusethis/tests/testthat/ref/yo/subdir2/file2.txt0000644000176200001440000000005214651000165021252 0ustar liggesusersI am file2.txt, located below yo/subdir2/ usethis/tests/testthat/ref/yo/subdir1/0000755000176200001440000000000014651000165017512 5ustar liggesusersusethis/tests/testthat/ref/yo/subdir1/file1.txt0000644000176200001440000000005214651000165021250 0ustar liggesusersI am file1.txt, located below yo/subdir1/ usethis/tests/testthat/ref/yo-loose-dropbox.zip0000644000176200001440000000124614651000165021462 0ustar liggesusersPK/¿+L/PKPK0¿+Lsubdir1/file1.txtóTHÌUHËÌI5Ô+©(ÑQÈÉON,IMQHJÍÉ/W¨Ì×/.MJÉ,2ÔçPKú×=,*PK/¿+Lsubdir2/file2.txtóTHÌUHËÌI5Ò+©(ÑQÈÉON,IMQHJÍÉ/W¨Ì×/.MJÉ,2ÒçPK¶ØÌd,*PK0¿+Lsubdir1/PKPK/¿+Lsubdir2/PKPK/¿+Lä/PK0¿+Lú×=,* ä1subdir1/file1.txtPK/¿+L¶ØÌd,* 
äœsubdir2/file2.txtPK0¿+Läsubdir1/PK/¿+Lä?subdir2/PKwusethis/tests/testthat/ref/foo/0000755000176200001440000000000014721320322016273 5ustar liggesusersusethis/tests/testthat/ref/foo/file.txt0000644000176200001440000000004614651000165017755 0ustar liggesusersI am file.txt which lives inside foo/ usethis/tests/testthat/ref/README.md0000644000176200001440000001575314717524721017020 0ustar liggesusersZIP file structures ================ ``` r devtools::load_all("~/rrr/usethis") #> ℹ Loading usethis library(fs) ``` ## Different styles of ZIP file usethis has an unexported function `tidy_unzip()`, which is used under the hood in `use_course()` and `use_zip()`. It is a wrapper around `utils::unzip()` that uses some heuristics to choose a good value for `exdir`, which is the “the directory to extract files to.†Why do we do this? Because it’s really easy to *not* get the desired result when unpacking a ZIP archive. Common aggravations: - Instead of the unpacked files being corraled within a folder, they explode as “loose parts†into the current working directory. Too little nesting. - The unpacked files are contained in a folder, but that folder itself is contained inside another folder. Too much nesting. `tidy_unzip()` tries to get the nesting just right. Why doesn’t unzipping “just workâ€? Because the people who make `.zip` files make lots of different choices when they actually create the archive and these details aren’t baked in, i.e. a successful roundtrip isn’t automatic. It usually requires some peeking inside the archive and adjusting the unpack options. This README documents specific `.zip` situations that we anticipate. ## Explicit parent folder Consider the foo folder: ``` bash tree foo #> foo #> └── file.txt #> #> 1 directory, 1 file ``` Zip it up like so: ``` bash zip -r foo-explicit-parent.zip foo/ ``` This is the type of ZIP file that we get from GitHub via links of the forms and . Inspect it in the shell: ``` bash unzip -Z1 foo-explicit-parent.zip #> foo/ #> foo/file.txt ``` Or from R: ``` r foo_files <- unzip("foo-explicit-parent.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 foo/ . foo #> 2 foo/file.txt foo file.txt ``` Note that the folder `foo/` is explicitly included and all of the files are contained in it (in this case, just one file). ## Implicit parent folder Consider the foo folder: ``` bash tree foo #> foo #> └── file.txt #> #> 1 directory, 1 file ``` Zip it up like so: ``` bash zip -r foo-implicit-parent.zip foo/* ``` Note the use of `foo/*`, as opposed to `foo` or `foo/`. This type of ZIP file was reported in . The example given there is . Inspect our small example in the shell: ``` bash unzip -Z1 foo-implicit-parent.zip #> foo/file.txt ``` Or from R: ``` r foo_files <- unzip("foo-implicit-parent.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 foo/file.txt foo file.txt ``` Note that `foo/` is not included and its (original) existence is just implicit in the relative path to, e.g., `foo/file.txt`. 
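As a quick aside (a sketch for this note only, not code from usethis itself), the packing style changes what a plain `utils::unzip()` call leaves behind, which is why `exdir` has to be chosen with care:

``` r
# Scratch output directories, used only for illustration.
utils::unzip("foo-implicit-parent.zip", exdir = "demo-implicit")
list.files("demo-implicit", recursive = TRUE)
#> [1] "foo/file.txt"

utils::unzip("foo-no-parent.zip", exdir = "demo-no-parent")
list.files("demo-no-parent", recursive = TRUE)
#> [1] "file.txt"
```

The implicit parent still gives a contained result because the entry names carry the folder; the "no parent" archive would have exploded as loose parts into the working directory if `exdir` had been left at its default.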
Here’s a similar look at the example from issue \#1961: ``` bash ~/rrr/usethis/tests/testthat/ref % unzip -l ~/Downloads/Species\ v2.3.zip Archive: /Users/jenny/Downloads/Species v2.3.zip Length Date Time Name --------- ---------- ----- ---- 1241 04-27-2023 09:16 species_v2/label_encoder.txt 175187560 04-06-2023 15:13 species_v2/model_arch.pt 174953575 04-14-2023 12:28 species_v2/model_weights.pth --------- ------- 350142376 3 files ``` Note also that the implicit parent folder `species_v2` is not the base of the ZIP file name `Species v2.3.zip`. ## No parent Consider the foo folder: ``` bash tree foo #> foo #> └── file.txt #> #> 1 directory, 1 file ``` Zip it up like so: ``` bash (cd foo && zip -r ../foo-no-parent.zip .) ``` Note that we are zipping everything in a folder from *inside* the folder. Inspect our small example in the shell: ``` bash unzip -Z1 foo-no-parent.zip #> file.txt ``` Or from R: ``` r foo_files <- unzip("foo-no-parent.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 file.txt . file.txt ``` All the files are packaged in the ZIP archive as “loose partsâ€, i.e. there is no explicit or implicit top-level directory. ## No parent, the DropBox Variation This is the structure of ZIP files yielded by DropBox via links of this form . I can’t figure out how to even do this with zip locally, so I had to create an example on DropBox and download it. Jim Hester reports it is possible with `archive::archive_write_files()`. It’s basically like the “no parent†example above, except it includes a spurious top-level directory `"/"`. Inspect our small example in the shell: ``` bash unzip -Z1 foo-loose-dropbox.zip #> / #> file.txt ``` Or from R: ``` r # curl::curl_download( # "https://www.dropbox.com/sh/5qfvssimxf2ja58/AABz3zrpf-iPYgvQCgyjCVdKa?dl=1", # destfile = "foo-loose-dropbox.zip" # ) foo_files <- unzip("foo-loose-dropbox.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 / / #> 2 file.txt . file.txt ``` Also note that, when unzipping with `unzip` in the shell, you get this result: Archive: foo-loose-dropbox.zip warning: stripped absolute path spec from / mapname: conversion of failed inflating: file.txt which indicates some tripping over the `/`. Only `file.txt` is left behind. This is a pretty odd ZIP packing strategy. But we need to plan for it. ## Only subdirectories For testing purposes, we also want an example where all the files are inside subdirectories. Examples based on the yo directory here: ``` bash tree yo #> yo #> ├── subdir1 #> │   └── file1.txt #> └── subdir2 #> └── file2.txt #> #> 3 directories, 2 files ``` Zip it up, in all the usual ways: ``` bash zip -r yo-explicit-parent.zip yo/ zip -r yo-implicit-parent.zip yo/* (cd yo && zip -r ../yo-no-parent.zip .) ``` Again, I couldn’t create the DropBox variant locally, so I did it by downloading from DropBox. 
``` r # curl::curl_download( # "https://www.dropbox.com/sh/afydxe6pkpz8v6m/AADHbMZAaW3IQ8zppH9mjNsga?dl=1", # destfile = "yo-loose-dropbox.zip" # ) ``` Inspect each in the shell: ``` bash unzip -Z1 yo-explicit-parent.zip #> yo/ #> yo/subdir2/ #> yo/subdir2/file2.txt #> yo/subdir1/ #> yo/subdir1/file1.txt ``` ``` bash unzip -Z1 yo-implicit-parent.zip #> yo/subdir1/ #> yo/subdir1/file1.txt #> yo/subdir2/ #> yo/subdir2/file2.txt ``` ``` bash unzip -Z1 yo-no-parent.zip #> subdir2/ #> subdir2/file2.txt #> subdir1/ #> subdir1/file1.txt ``` ``` bash unzip -Z1 yo-loose-dropbox.zip #> / #> subdir1/file1.txt #> subdir2/file2.txt #> subdir1/ #> subdir2/ ``` usethis/tests/testthat/ref/README.Rmd0000644000176200001440000001421114717524721017126 0ustar liggesusers--- title: "ZIP file structures" output: github_document --- ```{r setup, include=FALSE} knitr::opts_chunk$set(comment = "#>", collapse = TRUE) ``` ```{r} devtools::load_all("~/rrr/usethis") library(fs) ``` ## Different styles of ZIP file usethis has an unexported function `tidy_unzip()`, which is used under the hood in `use_course()` and `use_zip()`. It is a wrapper around `utils::unzip()` that uses some heuristics to choose a good value for `exdir`, which is the "the directory to extract files to." Why do we do this? Because it's really easy to _not_ get the desired result when unpacking a ZIP archive. Common aggravations: * Instead of the unpacked files being corraled within a folder, they explode as "loose parts" into the current working directory. Too little nesting. * The unpacked files are contained in a folder, but that folder itself is contained inside another folder. Too much nesting. `tidy_unzip()` tries to get the nesting just right. Why doesn't unzipping "just work"? Because the people who make `.zip` files make lots of different choices when they actually create the archive and these details aren't baked in, i.e. a successful roundtrip isn't automatic. It usually requires some peeking inside the archive and adjusting the unpack options. This README documents specific `.zip` situations that we anticipate. ## Explicit parent folder Consider the foo folder: ```{bash} tree foo ``` Zip it up like so: ```{bash, eval = FALSE} zip -r foo-explicit-parent.zip foo/ ``` This is the type of ZIP file that we get from GitHub via links of the forms and . Inspect it in the shell: ```{bash} unzip -Z1 foo-explicit-parent.zip ``` Or from R: ```{r} foo_files <- unzip("foo-explicit-parent.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` Note that the folder `foo/` is explicitly included and all of the files are contained in it (in this case, just one file). ## Implicit parent folder Consider the foo folder: ```{bash} tree foo ``` Zip it up like so: ```{bash, eval = FALSE} zip -r foo-implicit-parent.zip foo/* ``` Note the use of `foo/*`, as opposed to `foo` or `foo/`. This type of ZIP file was reported in . The example given there is . Inspect our small example in the shell: ```{bash} unzip -Z1 foo-implicit-parent.zip ``` Or from R: ```{r} foo_files <- unzip("foo-implicit-parent.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` Note that `foo/` is not included and its (original) existence is just implicit in the relative path to, e.g., `foo/file.txt`. 
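To make that concrete, here is a minimal sketch (not the actual `tidy_unzip()` code, and not something that ships in usethis) of how one could ask whether every entry sits under a single top-level folder even when that folder has no explicit entry of its own:

```{r eval = FALSE}
# Sketch only: this is not the tidy_unzip() implementation, just the shape of
# the question it has to answer, using base R plus utils::unzip().
common_top_dir <- function(zipfile) {
  nm <- utils::unzip(zipfile, list = TRUE)$Name
  nm <- nm[nm != "/"]              # ignore a spurious "/" entry (DropBox-style)
  if (!all(grepl("/", nm))) {
    return(NA_character_)          # at least one loose file at the top level
  }
  tops <- unique(sub("/.*$", "", nm))
  if (length(tops) == 1) tops else NA_character_
}

common_top_dir("foo-implicit-parent.zip")  # "foo", despite no "foo/" entry
common_top_dir("foo-no-parent.zip")        # NA, i.e. loose parts
```

For the "explicit parent" archive above it returns the same answer; for the "no parent" and DropBox-style archives below it returns `NA`, which is exactly the case where choosing `exdir` matters.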
Here's a similar look at the example from issue #1961: ```bash ~/rrr/usethis/tests/testthat/ref % unzip -l ~/Downloads/Species\ v2.3.zip Archive: /Users/jenny/Downloads/Species v2.3.zip Length Date Time Name --------- ---------- ----- ---- 1241 04-27-2023 09:16 species_v2/label_encoder.txt 175187560 04-06-2023 15:13 species_v2/model_arch.pt 174953575 04-14-2023 12:28 species_v2/model_weights.pth --------- ------- 350142376 3 files ``` Note also that the implicit parent folder `species_v2` is not the base of the ZIP file name `Species v2.3.zip`. ## No parent Consider the foo folder: ```{bash} tree foo ``` Zip it up like so: ```{bash, eval = FALSE} (cd foo && zip -r ../foo-no-parent.zip .) ``` Note that we are zipping everything in a folder from *inside* the folder. Inspect our small example in the shell: ```{bash} unzip -Z1 foo-no-parent.zip ``` Or from R: ```{r} foo_files <- unzip("foo-no-parent.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` All the files are packaged in the ZIP archive as "loose parts", i.e. there is no explicit or implicit top-level directory. ## No parent, the DropBox Variation This is the structure of ZIP files yielded by DropBox via links of this form . I can't figure out how to even do this with zip locally, so I had to create an example on DropBox and download it. Jim Hester reports it is possible with `archive::archive_write_files()`. It's basically like the "no parent" example above, except it includes a spurious top-level directory `"/"`. Inspect our small example in the shell: ```{bash} unzip -Z1 foo-loose-dropbox.zip ``` Or from R: ```{r} # curl::curl_download( # "https://www.dropbox.com/sh/5qfvssimxf2ja58/AABz3zrpf-iPYgvQCgyjCVdKa?dl=1", # destfile = "foo-loose-dropbox.zip" # ) foo_files <- unzip("foo-loose-dropbox.zip", list = TRUE) with( foo_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` Also note that, when unzipping with `unzip` in the shell, you get this result: ``` Archive: foo-loose-dropbox.zip warning: stripped absolute path spec from / mapname: conversion of failed inflating: file.txt ``` which indicates some tripping over the `/`. Only `file.txt` is left behind. This is a pretty odd ZIP packing strategy. But we need to plan for it. ## Only subdirectories For testing purposes, we also want an example where all the files are inside subdirectories. Examples based on the yo directory here: ```{bash} tree yo ``` Zip it up, in all the usual ways: ```{bash, eval = FALSE} zip -r yo-explicit-parent.zip yo/ zip -r yo-implicit-parent.zip yo/* (cd yo && zip -r ../yo-no-parent.zip .) ``` Again, I couldn't create the DropBox variant locally, so I did it by downloading from DropBox. 
```{r eval = FALSE} # curl::curl_download( # "https://www.dropbox.com/sh/afydxe6pkpz8v6m/AADHbMZAaW3IQ8zppH9mjNsga?dl=1", # destfile = "yo-loose-dropbox.zip" # ) ``` Inspect each in the shell: ```{bash} unzip -Z1 yo-explicit-parent.zip ``` ```{bash} unzip -Z1 yo-implicit-parent.zip ``` ```{bash} unzip -Z1 yo-no-parent.zip ``` ```{bash} unzip -Z1 yo-loose-dropbox.zip ``` usethis/tests/testthat/ref/yo-no-parent.zip0000644000176200001440000000131614717524721020603 0ustar liggesusersPK húXsubdir2/UT u¤fu¤fux öPK húX¶ØÌd**subdir2/file2.txtUT u¤fx¤fux öI am file2.txt, located below yo/subdir2/ PK húXsubdir1/UT u¤fu¤fux öPK húXú×=**subdir1/file1.txtUT u¤fx¤fux öI am file1.txt, located below yo/subdir1/ PK húXíAsubdir2/UTu¤fux öPK húX¶ØÌd**¤Bsubdir2/file2.txtUTu¤fux öPK húXíA·subdir1/UTu¤fux öPK húXú×=**¤ùsubdir1/file1.txtUTu¤fux öPKJnusethis/tests/testthat/test-browse.R0000644000176200001440000001016414651000165017341 0ustar liggesuserstest_that("github_url() errors if no project", { withr::local_dir(path_temp()) local_project(NULL, force = TRUE, setwd = TRUE) expect_usethis_error(github_url(), "not.*inside a valid project") }) test_that("github_url() works on active project", { create_local_project() local_interactive(FALSE) use_git() expect_usethis_error(github_url(), "no DESCRIPTION") expect_usethis_error(github_url(), "no GitHub remotes") use_description() proj_desc_field_update("URL", "https://example.com") expect_usethis_error(github_url(), "no GitHub remotes") issues <- "https://github.com/OWNER/REPO_BUGREPORTS/issues" proj_desc_field_update("BugReports", issues) expect_equal(github_url(), "https://github.com/OWNER/REPO_BUGREPORTS") origin <- "https://github.com/OWNER/REPO_ORIGIN" use_git_remote("origin", origin) expect_equal(github_url(), "https://github.com/OWNER/REPO_ORIGIN") }) test_that("github_url() strips everything after USER/REPO", { expect_equal(github_url("usethis"), "https://github.com/r-lib/usethis") expect_equal(github_url("gh"), "https://github.com/r-lib/gh") }) test_that("github_url() has fall back for CRAN packages", { expect_warning(out <- github_url("utils"), "CRAN mirror") expect_equal(out, "https://github.com/cran/utils") }) test_that("github_url() errors for nonexistent package", { expect_usethis_error(github_url("1234"), "Can't find") }) test_that("cran_home() produces canonical URL", { pkg <- create_local_package(file_temp("abc")) expect_match(cran_home(), "https://cran.r-project.org/package=abc") expect_match(cran_home("bar"), "https://cran.r-project.org/package=bar") }) test_that("desc_urls() returns NULL if no project", { withr::local_dir(path_temp()) local_project(NULL, force = TRUE, setwd = TRUE) expect_null(desc_urls()) }) test_that("desc_urls() returns NULL if no DESCRIPTION", { create_local_project() expect_null(desc_urls()) }) test_that("desc_urls() returns empty data frame if no URLs", { create_local_project() use_description() expect_equal( desc_urls(), data.frame( url = character(), desc_field = character(), is_github = logical(), stringsAsFactors = FALSE ) ) }) test_that("desc_urls() returns data frame for locally installed package", { out <- desc_urls("curl") expect_true(nrow(out) > 1) }) test_that("desc_urls() returns data frame for an uninstalled package", { skip_if_offline() pkg <- "devoid" if (requireNamespace(pkg, quietly = TRUE)) { skip(paste0(pkg, " is installed locally")) } out <- desc_urls(pkg) expect_true(nrow(out) > 1) }) test_that("desc_urls() returns NULL for an nonexistent package", { skip_if_offline() expect_null(desc_urls("1234")) }) 
test_that("browse_XXX() goes to correct URL", { local_interactive(FALSE) g <- function(x) paste0("https://github.com/", x) expect_equal(browse_github("gh"), g("r-lib/gh")) expect_match(browse_github_issues("gh"), g("r-lib/gh/issues")) expect_equal(browse_github_issues("gh", 1), g("r-lib/gh/issues/1")) expect_equal(browse_github_issues("gh", "new"), g("r-lib/gh/issues/new")) expect_match(browse_github_pulls("gh"), g("r-lib/gh/pulls")) expect_equal(browse_github_pulls("gh", 1), g("r-lib/gh/pull/1")) expect_match(browse_github_actions("gh"), g("r-lib/gh/actions")) expect_equal(browse_cran("usethis"), "https://cran.r-project.org/package=usethis") }) test_that("browse_package() errors if no project", { withr::local_dir(path_temp()) local_project(NULL, force = TRUE, setwd = TRUE) expect_usethis_error(browse_project(), "not.*inside a valid project") }) test_that("browse_package() returns URLs", { create_local_project() use_git() expect_equal(browse_package(), character()) origin <- "https://github.com/OWNER/REPO" use_git_remote("origin", origin) foofy <- "https://github.com/SOMEONE_ELSE/REPO" use_git_remote("foofy", foofy) use_description() pkgdown <- "https://example.com" proj_desc_field_update("URL", pkgdown) issues <- "https://github.com/OWNER/REPO/issues" proj_desc_field_update("BugReports", issues) out <- browse_package() expect_setequal(out, c(origin, foofy, pkgdown, issues)) }) usethis/tests/testthat/test-readme.R0000644000176200001440000000325314651000165017276 0ustar liggesuserstest_that("use_readme_md() creates README.md", { create_local_package() use_readme_md() expect_proj_file("README.md") }) test_that("use_readme_rmd() creates README.Rmd", { skip_if_not_installed("rmarkdown") create_local_package() use_readme_rmd() expect_proj_file("README.Rmd") }) test_that("use_readme_rmd() sets up git pre-commit hook if pkg uses git", { skip_if_no_git_user() skip_if_not_installed("rmarkdown") create_local_package() use_git() use_readme_rmd(open = FALSE) expect_proj_file(".git", "hooks", "pre-commit") }) test_that("use_readme_md() has expected form for a non-GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) create_local_package() use_readme_md() expect_snapshot(writeLines(read_utf8("README.md")), transform = scrub_testpkg) }) test_that("use_readme_md() has expected form for a GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) local_target_repo_spec("OWNER/TESTPKG") create_local_package() use_readme_md() expect_snapshot(writeLines(read_utf8("README.md")), transform = scrub_testpkg) }) test_that("use_readme_rmd() has expected form for a non-GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) create_local_package() use_readme_rmd() expect_snapshot(writeLines(read_utf8("README.Rmd")), transform = scrub_testpkg) }) test_that("use_readme_rmd() has expected form for a GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) local_target_repo_spec("OWNER/TESTPKG") create_local_package() use_readme_rmd() expect_snapshot(writeLines(read_utf8("README.Rmd")), transform = scrub_testpkg) }) usethis/tests/testthat/test-vignette.R0000644000176200001440000000656214721145632017703 0ustar liggesusers# use_vignette ------------------------------------------------------------ test_that("use_vignette() requires a package", { create_local_project() expect_usethis_error(use_vignette(), "not an R package") }) test_that("use_vignette() gives useful errors", { create_local_package() expect_snapshot(error = TRUE, { 
use_vignette() use_vignette("bad name") }) }) test_that("use_vignette() does the promised setup, Rmd", { create_local_package() use_vignette("name", "title") expect_proj_file("vignettes/name.Rmd") ignores <- read_utf8(proj_path(".gitignore")) expect_true("inst/doc" %in% ignores) deps <- proj_deps() expect_true( all(c("knitr", "rmarkdown") %in% deps$package[deps$type == "Suggests"]) ) expect_identical(proj_desc()$get_field("VignetteBuilder"), "knitr") }) test_that("use_vignette() does the promised setup, qmd", { create_local_package() local_check_installed() use_vignette("name.qmd", "title") expect_proj_file("vignettes/name.qmd") ignores <- read_utf8(proj_path(".gitignore")) expect_true("inst/doc" %in% ignores) deps <- proj_deps() expect_true( all(c("knitr", "quarto") %in% deps$package[deps$type == "Suggests"]) ) expect_identical(proj_desc()$get_field("VignetteBuilder"), "quarto") }) test_that("use_vignette() does the promised setup, mix of Rmd and qmd", { create_local_package() local_check_installed() use_vignette("older-vignette", "older Rmd vignette") use_vignette("newer-vignette.qmd", "newer qmd vignette") expect_proj_file("vignettes/older-vignette.Rmd") expect_proj_file("vignettes/newer-vignette.qmd") deps <- proj_deps() expect_true( all(c("knitr", "quarto", "rmarkdown") %in% deps$package[deps$type == "Suggests"]) ) vignette_builder <- proj_desc()$get_field("VignetteBuilder") expect_match(vignette_builder, "knitr", fixed = TRUE) expect_match(vignette_builder, "quarto", fixed = TRUE) }) # use_article ------------------------------------------------------------- test_that("use_article() does the promised setup, Rmd", { create_local_package() local_interactive(FALSE) # Let's have another package already in Config/Needs/website proj_desc_field_update("Config/Needs/website", "somepackage") use_article("name", "title") expect_proj_file("vignettes/articles/name.Rmd") expect_setequal( proj_desc()$get_list("Config/Needs/website"), c("rmarkdown", "somepackage") ) }) # Note that qmd articles seem to cause problems for build_site() rn # https://github.com/r-lib/pkgdown/issues/2821 test_that("use_article() does the promised setup, qmd", { create_local_package() local_check_installed() local_interactive(FALSE) # Let's have another package already in Config/Needs/website proj_desc_field_update("Config/Needs/website", "somepackage") use_article("name.qmd", "title") expect_proj_file("vignettes/articles/name.qmd") expect_setequal( proj_desc()$get_list("Config/Needs/website"), c("quarto", "somepackage") ) }) # helpers ----------------------------------------------------------------- test_that("valid_vignette_name() works", { expect_true(valid_vignette_name("perfectly-valid-name")) expect_false(valid_vignette_name("01-test")) expect_false(valid_vignette_name("test.1")) }) test_that("we error informatively for bad vignette extension", { expect_snapshot( error = TRUE, check_vignette_extension("Rnw") ) }) usethis/tests/testthat/test-data.R0000644000176200001440000000551214717524762016773 0ustar liggesuserstest_that("use_data() errors for a non-package project", { create_local_project() expect_usethis_error(use_data(letters), "not an R package") }) test_that("use_data() stores new, non-internal data", { pkg <- create_local_package() letters2 <- letters month.abb2 <- month.abb expect_false(desc::desc_has_fields("LazyData")) use_data(letters2, month.abb2) expect_true(desc::desc_has_fields("LazyData")) rm(letters2, month.abb2) load(proj_path("data", "letters2.rda")) load(proj_path("data", "month.abb2.rda")) 
expect_identical(letters2, letters) expect_identical(month.abb2, month.abb) }) test_that("use_data() honors `overwrite` for non-internal data", { pkg <- create_local_package() letters2 <- letters use_data(letters2) expect_usethis_error(use_data(letters2), ".*data/letters2.rda.* already exist") letters2 <- rev(letters) use_data(letters2, overwrite = TRUE) load(proj_path("data", "letters2.rda")) expect_identical(letters2, rev(letters)) }) test_that("use_data() stores new internal data", { pkg <- create_local_package() letters2 <- letters month.abb2 <- month.abb use_data(letters2, month.abb2, internal = TRUE) rm(letters2, month.abb2) load(proj_path("R", "sysdata.rda")) expect_identical(letters2, letters) expect_identical(month.abb2, month.abb) }) test_that("use_data() honors `overwrite` for internal data", { pkg <- create_local_package() letters2 <- letters use_data(letters2, internal = TRUE) rm(letters2) expect_usethis_error( use_data(letters2, internal = TRUE), ".*R/sysdata.rda.* already exist" ) letters2 <- rev(letters) use_data(letters2, internal = TRUE, overwrite = TRUE) load(proj_path("R", "sysdata.rda")) expect_identical(letters2, rev(letters)) }) test_that("use_data() writes version 3 by default", { create_local_package() x <- letters use_data(x, internal = TRUE, compress = FALSE) expect_identical( rawToChar(readBin(proj_path("R", "sysdata.rda"), n = 4, what = "raw")), "RDX3" ) }) test_that("use_data() can enforce `ascii = TRUE`", { create_local_package() x <- "h\u00EF" use_data(x) expect_false(tools::checkRdaFiles("data/x.rda")[["ASCII"]]) use_data(x, ascii = TRUE, overwrite = TRUE) expect_true(tools::checkRdaFiles("data/x.rda")[["ASCII"]]) }) test_that("use_data_raw() does setup", { create_local_package() use_data_raw(open = FALSE) expect_proj_file(path("data-raw", "DATASET.R")) use_data_raw("daisy", open = FALSE) expect_proj_file(path("data-raw", "daisy.R")) expect_true(is_build_ignored("^data-raw$")) }) test_that("use_data() does not decrease minimum version of R itself", { create_local_package() use_package("R", "depends", "4.1") original_minimum_r_version <- pkg_minimum_r_version() use_data(letters) expect_true(pkg_minimum_r_version() >= original_minimum_r_version) }) usethis/tests/testthat/test-github_token.R0000644000176200001440000000126114651000165020520 0ustar liggesuserstest_that("code_hint_with_host() works", { expect_identical(code_hint_with_host("foo"), "foo()") expect_identical(code_hint_with_host("foo", arg_name = "arg"), "foo()") host_github <- "https://api.github.com" expect_identical(code_hint_with_host("foo", host = host_github), "foo()") expect_identical( code_hint_with_host("foo", host = host_github, arg_name = "arg"), "foo()" ) host_ghe <- "https://github.acme.com" expect_identical( code_hint_with_host("foo", host = host_ghe), 'foo("https://github.acme.com")' ) expect_identical( code_hint_with_host("foo", host = host_ghe, arg_name = "arg"), 'foo(arg = \"https://github.acme.com\")' ) }) usethis/tests/testthat/test-directory.R0000644000176200001440000000144114651000165020042 0ustar liggesuserstest_that("create_directory() doesn't bother a pre-existing target dir", { tmp <- file_temp() dir_create(tmp) expect_true(is_dir(tmp)) expect_no_error(create_directory(tmp)) expect_true(is_dir(tmp)) }) test_that("create_directory() creates a directory", { tmp <- file_temp("yes") create_directory(tmp) expect_true(is_dir(tmp)) }) # check_path_is_directory ------------------------------------------------- test_that("no false positive for trailing slash", { pwd <- sub("/$", "", 
getwd()) expect_no_error(check_path_is_directory(paste0(pwd, "/"))) }) test_that("symlink to directory is directory", { base <- dir_create(file_temp()) base_a <- dir_create(path(base, "a")) base_b <- link_create(base_a, path(base, "b")) expect_no_error(check_path_is_directory(base_b)) }) usethis/tests/testthat/test-upkeep.R0000644000176200001440000000443414717524762017355 0ustar liggesuserstest_that("tidy upkeep bullets don't change accidentally", { create_local_package() use_mit_license() expect_equal(last_upkeep_year(), 2000L) local_mocked_bindings( Sys.Date = function() as.Date("2025-01-01"), usethis_version = function() "1.1.0", author_has_rstudio_email = function() TRUE, is_posit_pkg = function() TRUE, is_posit_person_canonical = function() FALSE ) expect_snapshot(writeLines(tidy_upkeep_checklist())) }) test_that("tidy upkeep omits bullets present in last_upkeep", { create_local_package() use_mit_license() expect_equal(last_upkeep_year(), 2000L) record_upkeep_date(as.Date("2023-04-04")) expect_equal(last_upkeep_year(), 2023L) local_mocked_bindings( Sys.Date = function() as.Date("2025-01-01"), usethis_version = function() "1.1.0", author_has_rstudio_email = function() TRUE, is_posit_pkg = function() TRUE, is_posit_person_canonical = function() FALSE ) expect_snapshot(writeLines(tidy_upkeep_checklist())) }) test_that("upkeep bullets don't change accidentally",{ skip_if_no_git_user() create_local_package() local_mocked_bindings( Sys.Date = function() as.Date("2023-01-01"), usethis_version = function() "1.1.0", git_default_branch = function() "main" ) expect_snapshot(writeLines(upkeep_checklist())) # Test some conditional TODOs use_code_of_conduct("jane.doe@foofymail.com") writeLines("# test environment\n", "cran-comments.md") local_mocked_bindings(git_default_branch = function() "master") # Look like a package that hasn't switched to testthat 3e yet use_testthat() desc::desc_del("Config/testthat/edition") desc::desc_del("Suggests") use_package("testthat", "Suggests") # previously (withr 2.5.0) we could put local_edition(2L) inside {..} inside # the expect_snapshot() call # that is no longer true with withr 3.0.0, but this hacktastic approach works local({ local_edition(2L) checklist <<- upkeep_checklist() }) expect_snapshot(writeLines(checklist)) }) test_that("get extra upkeep bullets works", { e <- new.env(parent = empty_env()) expect_equal(upkeep_extra_bullets(e), "") e$upkeep_bullets <- function() c("extra", "upkeep bullets") expect_equal( upkeep_extra_bullets(e), c("* [ ] extra", "* [ ] upkeep bullets", "") ) }) usethis/tests/testthat/test-description.R0000644000176200001440000000773114651000165020371 0ustar liggesusers # use_description_defaults() ---------------------------------------------- test_that("user's fields > usethis defaults", { d <- use_description_defaults("pkg", fields = list(Title = "TEST1", URL = "TEST1")) expect_equal(d$Title, "TEST1") expect_equal(d$URL, "TEST1") expect_equal(d$Version, "0.0.0.9000") }) test_that("usethis options > usethis defaults", { withr::local_options(list( usethis.description = list(License = "TEST") )) d <- use_description_defaults() expect_equal(d$License, "TEST") expect_equal(d$Version, "0.0.0.9000") }) test_that("usethis options > usethis defaults, even for Authors@R", { withr::local_options(list( usethis.description = list( "Authors@R" = utils::person("Jane", "Doe") ) )) d <- use_description_defaults() expect_equal( d$`Authors@R`, "person(given = \"Jane\",\n family = \"Doe\")" ) expect_match(d$`Authors@R`, '^person[(]given = "Jane"') 
expect_match(d$`Authors@R`, '"Doe"[)]$') }) test_that("user's fields > options > defaults", { withr::local_options(list( usethis.description = list(License = "TEST1", Title = "TEST1") )) d <- use_description_defaults("pkg", fields = list(Title = "TEST2")) expect_equal(d$Title, "TEST2") expect_equal(d$License, "TEST1") expect_equal(d$Version, "0.0.0.9000") }) test_that("automatically converts person object to text", { d <- use_description_defaults( "pkg", fields = list(`Authors@R` = person("H", "W")) ) expect_match(d$`Authors@R`, '^person[(]given = "H"') expect_match(d$`Authors@R`, '"W"[)]$') }) test_that("can set package", { d <- use_description_defaults(package = "TEST") expect_equal(d$Package, "TEST") }) test_that("`roxygen = FALSE` is honoured", { d <- use_description_defaults(roxygen = FALSE) expect_null(d[["Roxygen"]]) expect_null(d[["RoxygenNote"]]) }) # use_description --------------------------------------------------------- test_that("creation succeeds even if options are broken", { withr::local_options(list(usethis.description = list( `Authors@R` = "person(" ))) create_local_project() expect_error(use_description(), NA) }) test_that("default description is tidy", { withr::local_options(list(usethis.description = NULL, devtools.desc = NULL)) create_local_package() before <- readLines(proj_path("DESCRIPTION")) use_tidy_description() after <- readLines(proj_path("DESCRIPTION")) expect_equal(before, after) }) test_that("valid CRAN names checked", { withr::local_options(list(usethis.description = NULL, devtools.desc = NULL)) create_local_package(dir = file_temp(pattern = "invalid_pkg_name")) expect_error(use_description(check_name = FALSE), NA) expect_error( use_description(check_name = TRUE), "is not a valid package name", class = "usethis_error" ) }) test_that("proj_desc_field_update() can address an existing field", { pkg <- create_local_package() orig <- tools::md5sum(proj_path("DESCRIPTION")) ## specify existing value of existing field --> should be no op proj_desc_field_update( key = "Version", value = proj_version(), overwrite = FALSE ) expect_identical(orig, tools::md5sum(proj_path("DESCRIPTION"))) expect_usethis_error( proj_desc_field_update( key = "Version", value = "1.1.1", overwrite = FALSE ), "has a different value" ) ## overwrite existing field proj_desc_field_update( key = "Version", value = "1.1.1", overwrite = TRUE ) expect_identical(proj_version(), "1.1.1") }) test_that("proj_desc_field_update() can add new field", { pkg <- create_local_package() proj_desc_field_update(key = "foo", value = "bar") expect_identical(proj_desc()$get_field("foo"), "bar") }) test_that("proj_desc_field_update() ignores whitespace", { pkg <- create_local_package() proj_desc_field_update(key = "foo", value = "\n bar") proj_desc_field_update(key = "foo", value = "bar", overwrite = FALSE) expect_identical(proj_desc()$get_field("foo", trim_ws = FALSE), "\n bar") }) usethis/tests/testthat/test-cran.R0000644000176200001440000000055514651000165016766 0ustar liggesuserstest_that("use_cran_comments() requires a package", { create_local_project() expect_usethis_error(use_cran_comments(), "not an R package") }) test_that("use_cran_comments() creates and ignores the promised file", { create_local_package() use_cran_comments() expect_proj_file("cran-comments.md") expect_true(is_build_ignored("^cran-comments\\.md$")) }) usethis/tests/testthat/test-utils-git.R0000644000176200001440000000422214651000165017757 0ustar liggesusers# Branch ------------------------------------------------------------------ 
test_that("git_branch() works", { skip_if_no_git_user() create_local_project() expect_usethis_error(git_branch(), "Cannot detect") git_init() expect_usethis_error(git_branch(), "unborn branch") writeLines("blah", proj_path("blah.txt")) gert::git_add("blah.txt", repo = git_repo()) gert::git_commit("Make one commit", repo = git_repo()) # branch name can depend on user's config, e.g. could be 'master' or 'main' expect_no_error( b <- git_branch() ) expect_true(nzchar(b)) }) # Protocol ------------------------------------------------------------------ test_that("git_protocol() catches bad input from usethis.protocol option", { withr::with_options( list(usethis.protocol = "nope"), { expect_usethis_error(git_protocol(), "must be either") expect_null(getOption("usethis.protocol")) } ) withr::with_options( list(usethis.protocol = c("ssh", "https")), { expect_usethis_error(git_protocol(), "must be either") expect_null(getOption("usethis.protocol")) } ) }) test_that("use_git_protocol() errors for bad input", { expect_usethis_error(use_git_protocol("nope"), "must be either") }) test_that("git_protocol() defaults to 'https'", { withr::with_options( list(usethis.protocol = NULL), expect_identical(git_protocol(), "https") ) }) test_that("git_protocol() honors, vets, and lowercases the option", { withr::with_options( list(usethis.protocol = "ssh"), expect_identical(git_protocol(), "ssh") ) withr::with_options( list(usethis.protocol = "SSH"), expect_identical(git_protocol(), "ssh") ) withr::with_options( list(usethis.protocol = "https"), expect_identical(git_protocol(), "https") ) withr::with_options( list(usethis.protocol = "nope"), expect_usethis_error(git_protocol(), "must be either") ) }) test_that("use_git_protocol() prioritizes and lowercases direct input", { withr::with_options( list(usethis.protocol = "ssh"), { expect_identical(use_git_protocol("HTTPS"), "https") expect_identical(git_protocol(), "https") } ) }) usethis/tests/testthat/test-author.R0000644000176200001440000000434114717524762017363 0ustar liggesuserstest_that("Can add an author and then another", { withr::local_options(usethis.description = NULL) create_local_package() local_interactive(FALSE) use_author( "Jennifer", "Bryan", email = "jenny@posit.co", comment = c(ORCID = "0000-0002-6983-2759") ) d <- proj_desc() ctb <- d$get_author(role = "ctb") expect_equal(ctb$given, "Jennifer") expect_equal(ctb$family, "Bryan") expect_equal(ctb$email, "jenny@posit.co") expect_equal(ctb$comment, c(ORCID = "0000-0002-6983-2759")) use_author( "Hadley", "Wickham", email = "hadley@posit.co", role = c("rev", "fnd") ) d <- proj_desc() rev <- d$get_author(role = "rev") fnd <- d$get_author(role = "fnd") expect_equal(rev$given, "Hadley") expect_equal(rev$family, "Wickham") expect_equal(fnd$given, "Hadley") expect_equal(fnd$family, "Wickham") }) test_that("Legacy author fields are challenged", { withr::local_options(usethis.description = NULL) create_local_package() d <- proj_desc() # I'm sort of deliberately leaving Authors@R there, just to make things # even less ideal. But one could do: # d$del("Authors@R") # used BH as of 2023-04-19 as my example of a package that uses # Author and Maintainer and does not use Authors@R d$set(Maintainer = "Dirk Eddelbuettel ") d$set(Author = "Dirk Eddelbuettel, John W. Emerson and Michael J. 
Kane") d$write() local_interactive(FALSE) withr::local_options(usethis.quiet = FALSE) expect_snapshot(challenge_legacy_author_fields(), error = TRUE) }) test_that("Decline to tweak an existing author", { withr::local_options( usethis.description = list( "Authors@R" = utils::person( "Jennifer", "Bryan", email = "jenny@posit.co", role = c("aut", "cre"), comment = c(ORCID = "0000-0002-6983-2759") ) ) ) create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_author("Jennifer", "Bryan", role = "cph"), error = TRUE ) }) test_that("Placeholder author is challenged", { withr::local_options(usethis.description = NULL) create_local_package() local_interactive(FALSE) withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_author("Charlie", "Brown")) }) usethis/tests/testthat/test-roxygen.R0000644000176200001440000000211414717717764017556 0ustar liggesuserstest_that("use_package_doc() compatible with roxygen_ns_append()", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_snapshot(use_package_doc(), transform = scrub_testpkg) expect_snapshot(roxygen_ns_append("test"), transform = scrub_testpkg) expect_silent(roxygen_ns_append("test")) }) test_that("use_roxygen_md() adds DESCRIPTION fields to naive package", { skip_if_not_installed("roxygen2") pkg <- create_local_package() use_roxygen_md() desc <- proj_desc() expect_equal(desc$get("Roxygen"), c(Roxygen = "list(markdown = TRUE)")) expect_true(desc$has_fields("RoxygenNote")) expect_true(uses_roxygen_md()) }) test_that("use_roxygen_md() finds 'markdown = TRUE' in presence of other stuff", { skip_if_not_installed("roxygen2") pkg <- create_local_package() desc::desc_set(Roxygen = 'list(markdown = TRUE, r6 = FALSE, load = "source", roclets = c("collate", "namespace", "rd", "roxyglobals::global_roclet"))') local_check_installed() expect_no_error(use_roxygen_md()) expect_true(uses_roxygen_md()) }) usethis/tests/testthat/test-documentation.R0000644000176200001440000000057314651000165020714 0ustar liggesuserstest_that("use_package_doc() requires a package", { create_local_project() expect_false(has_package_doc()) expect_usethis_error(use_package_doc(), "not an R package") }) test_that("use_package_doc() creates the promised file", { create_local_package() use_package_doc() expect_proj_file("R", paste0(project_name(), "-package.R")) expect_true(has_package_doc()) }) usethis/tests/testthat/test-rstudio.R0000644000176200001440000000655414651000165017541 0ustar liggesuserstest_that("use_rstudio() creates .Rproj file, named after directory", { dir <- create_local_package(rstudio = FALSE) use_rstudio() rproj <- path_file(dir_ls(proj_get(), regexp = "[.]Rproj$")) expect_identical(path_ext_remove(rproj), path_file(dir)) # Always uses POSIX line endings expect_equal(proj_line_ending(), "\n") }) test_that("use_rstudio() can opt-out of reformatting", { create_local_project(rstudio = FALSE) use_rstudio(reformat = FALSE) out <- readLines(rproj_path()) expect_true(is.na(match("AutoAppendNewline", out))) expect_true(is.na(match("StripTrailingWhitespace", out))) expect_true(is.na(match("LineEndingConversion", out))) }) test_that("use_rstudio() omits package-related config for a project", { create_local_project(rstudio = FALSE) use_rstudio() out <- readLines(rproj_path()) expect_true(is.na(match("BuildType: Package", out))) }) test_that("an RStudio project is recognized", { create_local_package(rstudio = TRUE) expect_true(is_rstudio_project()) expect_match(rproj_path(), "\\.Rproj$") }) 
test_that("we error if there isn't exactly one Rproj files", { dir <- withr::local_tempdir() path <- dir_create(path(dir, "test")) expect_snapshot(rproj_path(path), error = TRUE) file_touch(path(path, "a.Rproj")) file_touch(path(path, "b.Rproj")) expect_snapshot(rproj_path(path), error = TRUE) }) test_that("a non-RStudio project is not recognized", { create_local_package(rstudio = FALSE) expect_false(is_rstudio_project()) expect_error(rproj_path(), NA_character_) }) test_that("Rproj is parsed (actually, only colon-containing lines)", { tmp <- withr::local_tempfile() writeLines(c("a: a", "", "b: b", "I have no colon"), tmp) expect_identical( parse_rproj(tmp), list(a = "a", "", b = "b", "I have no colon") ) }) test_that("Existing field(s) in Rproj can be modified", { tmp <- withr::local_tempfile() writeLines( c( "Version: 1.0", "", "RestoreWorkspace: Default", "SaveWorkspace: Yes", "AlwaysSaveHistory: Default" ), tmp ) before <- parse_rproj(tmp) delta <- list(RestoreWorkspace = "No", SaveWorkspace = "No") after <- modify_rproj(tmp, delta) expect_identical(before[c(1, 2, 5)], after[c(1, 2, 5)]) expect_identical(after[3:4], delta) }) test_that("we can roundtrip an Rproj file", { create_local_package(rstudio = TRUE) rproj_file <- rproj_path() before <- read_utf8(rproj_file) rproj <- modify_rproj(rproj_file, list()) writeLines(serialize_rproj(rproj), rproj_file) after <- read_utf8(rproj_file) expect_identical(before, after) }) test_that("use_blank_state('project') modifies Rproj", { create_local_package(rstudio = TRUE) use_blank_slate("project") rproj <- parse_rproj(rproj_path()) expect_equal(rproj$RestoreWorkspace, "No") expect_equal(rproj$SaveWorkspace, "No") }) test_that("use_blank_state() modifies user-level RStudio prefs", { path <- withr::local_tempdir() withr::local_envvar(c("XDG_CONFIG_HOME" = path)) use_blank_slate() prefs <- rstudio_prefs_read() expect_equal(prefs[["save_workspace"]], "never") expect_false(prefs[["load_workspace"]]) }) test_that("use_rstudio_preferences", { path <- withr::local_tempdir() withr::local_envvar(c("XDG_CONFIG_HOME" = path)) use_rstudio_preferences(x = 1, y = "a") prefs <- rstudio_prefs_read() expect_equal(prefs$x, 1) expect_equal(prefs$y, "a") }) usethis/tests/testthat/test-rcpp.R0000644000176200001440000000517314651000165017010 0ustar liggesuserstest_that("use_rcpp() requires a package", { create_local_project() expect_usethis_error(use_rcpp(), "not an R package") }) test_that("use_rcpp() creates files/dirs, edits DESCRIPTION and .gitignore", { create_local_package() use_roxygen_md() # pretend Rcpp is installed local_check_installed() use_rcpp("test") expect_match(desc::desc_get("LinkingTo"), "Rcpp") expect_match(desc::desc_get("Imports"), "Rcpp") expect_proj_dir("src") expect_proj_file("src", "test.cpp") ignores <- read_utf8(proj_path("src", ".gitignore")) expect_true(all(c("*.o", "*.so", "*.dll") %in% ignores)) }) test_that("use_rcpp_armadillo() creates Makevars files and edits DESCRIPTION", { create_local_package() use_roxygen_md() local_interactive(FALSE) # pretend RcppArmadillo is installed local_check_installed() use_rcpp_armadillo("code") expect_proj_file("src", "code.cpp") expect_match(desc::desc_get("LinkingTo"), "RcppArmadillo") expect_proj_file("src", "Makevars") expect_proj_file("src", "Makevars.win") }) test_that("use_rcpp_eigen() edits DESCRIPTION", { create_local_package() use_roxygen_md() # pretend RcppArmadillo is installed local_check_installed() use_rcpp_eigen("code") expect_proj_file("src", "code.cpp") 
expect_match(desc::desc_get("LinkingTo"), "RcppEigen") }) test_that("use_src() doesn't message if not needed", { create_local_package() use_roxygen_md() use_package_doc() use_src() withr::local_options(list(usethis.quiet = FALSE)) expect_silent(use_src()) }) test_that("use_makevars() respects pre-existing Makevars", { pkg <- create_local_package() dir_create(proj_path("src")) makevars_file <- proj_path("src", "Makevars") makevars_win_file <- proj_path("src", "Makevars.win") writeLines("USE_CXX = CXX11", makevars_file) file_copy(makevars_file, makevars_win_file) before_makevars_file <- read_utf8(makevars_file) before_makevars_win_file <- read_utf8(makevars_win_file) makevars_settings <- list( "PKG_CXXFLAGS" = "-Wno-reorder" ) use_makevars(makevars_settings) expect_identical(before_makevars_file, read_utf8(makevars_file)) expect_identical(before_makevars_win_file, read_utf8(makevars_win_file)) }) test_that("use_makevars() creates Makevars files with appropriate configuration", { pkg <- create_local_package() makevars_settings <- list( "CXX_STD" = "CXX11" ) use_makevars(makevars_settings) makevars_content <- paste0(names(makevars_settings), " = ", makevars_settings) expect_identical(makevars_content, read_utf8(proj_path("src", "Makevars"))) expect_identical(makevars_content, read_utf8(proj_path("src", "Makevars.win"))) }) usethis/tests/testthat/_snaps/0000755000176200001440000000000014721145632016230 5ustar liggesusersusethis/tests/testthat/_snaps/helpers.md0000644000176200001440000000374414720402072020215 0ustar liggesusers# we message for new type and are silent for same type Code use_dependency("crayon", "Imports") Message v Adding crayon to 'Imports' field in DESCRIPTION. # we message for version change and are silent for same version Code use_dependency("crayon", "Imports") Message v Adding crayon to 'Imports' field in DESCRIPTION. --- Code use_dependency("crayon", "Imports", min_version = "1.0.0") Message v Increasing crayon version to ">= 1.0.0" in DESCRIPTION. --- Code use_dependency("crayon", "Imports", min_version = "2.0.0") Message v Increasing crayon version to ">= 2.0.0" in DESCRIPTION. --- Code use_dependency("crayon", "Imports", min_version = "1.0.0") Message v Decreasing crayon version to ">= 1.0.0" in DESCRIPTION. # use_dependency() upgrades a dependency Code use_dependency("usethis", "Suggests") Message v Adding usethis to 'Suggests' field in DESCRIPTION. --- Code use_dependency("usethis", "Imports") Message v Moving usethis from 'Suggests' to 'Imports' field in DESCRIPTION. # use_dependency() declines to downgrade a dependency Code use_dependency("usethis", "Imports") Message v Adding usethis to 'Imports' field in DESCRIPTION. --- Code use_dependency("usethis", "Suggests") Message ! Package usethis is already listed in 'Imports' in DESCRIPTION; no change made. # can add LinkingTo dependency if other dependency already exists Code use_dependency("rlang", "LinkingTo") Message v Adding rlang to 'LinkingTo' field in DESCRIPTION. # use_dependency() does not fall over on 2nd LinkingTo request Code use_dependency("rlang", "LinkingTo") # use_dependency() can level up a LinkingTo dependency Code use_package("rlang") Message v Moving rlang from 'Suggests' to 'Imports' field in DESCRIPTION. [ ] Refer to functions with `rlang::fun()`. 
usethis/tests/testthat/_snaps/data-table.md0000644000176200001440000000314614720402070020543 0ustar liggesusers# use_data_table() Imports data.table Code roxygen_ns_show() Output [1] "#' @importFrom data.table .BY" [2] "#' @importFrom data.table .EACHI" [3] "#' @importFrom data.table .GRP" [4] "#' @importFrom data.table .I" [5] "#' @importFrom data.table .N" [6] "#' @importFrom data.table .NGRP" [7] "#' @importFrom data.table .SD" [8] "#' @importFrom data.table :=" [9] "#' @importFrom data.table data.table" # use_data_table() blocks use of Depends Code use_data_table() Message ! data.table should be in 'Imports' or 'Suggests', not 'Depends'! v Removing data.table from 'Depends'. v Adding data.table to 'Imports' field in DESCRIPTION. v Adding "@importFrom data.table data.table", "@importFrom data.table :=", "@importFrom data.table .SD", "@importFrom data.table .BY", "@importFrom data.table .N", "@importFrom data.table .I", "@importFrom data.table .GRP", "@importFrom data.table .NGRP", and "@importFrom data.table .EACHI" to 'R/{TESTPKG}-package.R'. --- Code roxygen_ns_show() Output [1] "#' @importFrom data.table .BY" [2] "#' @importFrom data.table .EACHI" [3] "#' @importFrom data.table .GRP" [4] "#' @importFrom data.table .I" [5] "#' @importFrom data.table .N" [6] "#' @importFrom data.table .NGRP" [7] "#' @importFrom data.table .SD" [8] "#' @importFrom data.table :=" [9] "#' @importFrom data.table data.table" usethis/tests/testthat/_snaps/tibble.md0000644000176200001440000000051114720402075020004 0ustar liggesusers# use_tibble() Imports tibble and imports tibble::tibble() Code use_tibble() Message v Adding tibble to 'Imports' field in DESCRIPTION. v Adding "@importFrom tibble tibble" to 'R/{TESTPKG}-package.R'. [ ] Document a returned tibble like so: #' @return a [tibble][tibble::tibble-package] usethis/tests/testthat/_snaps/version.md0000644000176200001440000000151614720402100020223 0ustar liggesusers# use_version() errors for invalid `which` Code use_version("1.2.3") Condition Error in `choose_version()`: ! `which` must be one of "major", "minor", "patch", or "dev", not "1.2.3". # use_version() increments version in DESCRIPTION, edits NEWS Code writeLines(read_utf8(proj_path("NEWS.md"))) Output # {TESTPKG} 2.0.0 * Added a `NEWS.md` file to track changes to the package. # use_version() updates (development version) directly Code writeLines(read_utf8(proj_path("NEWS.md"))) Output # {TESTPKG} 0.0.2 # {TESTPKG} 0.0.1 * Added a `NEWS.md` file to track changes to the package. # use_version() updates version.c Code writeLines(lines) Output foo; const char {TESTPKG}_version = "1.0.0.9000"; bar; usethis/tests/testthat/_snaps/logo.md0000644000176200001440000000120414720402073017501 0ustar liggesusers# use_logo() shows a clickable path with README Code use_logo("logo.png") Message v Creating 'man/figures/'. v Resized 'logo.png' to 240x278. [ ] Add logo to 'README.md' with the following html: # {TESTPKG} # use_logo() writes a file in lowercase and it knows that Code use_logo("LoGo.PNG") Message v Creating 'man/figures/'. v Resized 'LoGo.PNG' to 240x278. [ ] Add logo to your README with the following html: # {TESTPKG} usethis/tests/testthat/_snaps/tutorial.md0000644000176200001440000000047614720402075020420 0ustar liggesusers# use_tutorial() checks its inputs Code use_tutorial() Condition Error in `use_tutorial()`: ! `name` must be a valid name, not absent. --- Code use_tutorial(name = "tutorial-file") Condition Error in `use_tutorial()`: ! `title` must be a valid name, not absent. 
usethis/tests/testthat/_snaps/use_import_from.md0000644000176200001440000000130514720402076021757 0ustar liggesusers# use_import_from() adds one line for each function Code roxygen_ns_show() Output [1] "#' @importFrom lifecycle deprecate_stop" [2] "#' @importFrom lifecycle deprecate_warn" # use_import_from() generates helpful errors Code use_import_from(1) Condition Error in `use_import_from()`: x `package` must be a single string. Code use_import_from(c("desc", "rlang")) Condition Error in `use_import_from()`: x `package` must be a single string. Code use_import_from("desc", "pool_noodle") Condition Error in `map2()`: i In index: 1. Caused by error in `.f()`: x Can't find `desc::pool_noodle()`. usethis/tests/testthat/_snaps/pkgdown.md0000644000176200001440000000130214720402073020211 0ustar liggesusers# use_pkgdown() creates and ignores the promised file/dir Code use_pkgdown() Message v Adding "^_pkgdown\\.yml$", "^docs$", and "^pkgdown$" to '.Rbuildignore'. v Adding "docs" to '.gitignore'. v Writing '_pkgdown.yml'. [ ] Edit '_pkgdown.yml'. # pkgdown_url() returns correct data, warns if pedantic Code pkgdown_url(pedantic = TRUE) Message ! pkgdown config does not specify the site's 'url', which is optional but recommended. Output NULL --- Code pkgdown_url(pedantic = TRUE) Message ! pkgdown config does not specify the site's 'url', which is optional but recommended. Output NULL usethis/tests/testthat/_snaps/roxygen.md0000644000176200001440000000054314720402075020243 0ustar liggesusers# use_package_doc() compatible with roxygen_ns_append() Code use_package_doc() Message v Writing 'R/{TESTPKG}-package.R'. [ ] Run `devtools::document()` to update package-level documentation. --- Code roxygen_ns_append("test") Message v Adding "test" to 'R/{TESTPKG}-package.R'. Output [1] TRUE usethis/tests/testthat/_snaps/course.md0000644000176200001440000000203714720402070020043 0ustar liggesusers# download_url() retry logic works as advertised Code out <- download_url(url = "URL", destfile = "destfile") --- Code out <- download_url(url = "URL", destfile = "destfile") Message i Retrying download ... attempt 2. --- Code out <- download_url(url = "URL", destfile = "destfile") Message i Retrying download ... attempt 2. i Retrying download ... attempt 3. --- Code out <- download_url(url = "URL", destfile = "destfile", n_tries = 3) Message i Retrying download ... attempt 2. i Retrying download ... attempt 3. Condition Error: ! try 3 --- Code out <- download_url(url = "URL", destfile = "destfile", n_tries = 10) Message i Retrying download ... attempt 2. i Retrying download ... attempt 3. i Retrying download ... attempt 4. # normalize_url() prepends https:// (or not) Code normalize_url(1) Condition Error in `normalize_url()`: ! `url` must be a valid name, not the number 1. usethis/tests/testthat/_snaps/package.md0000644000176200001440000000323514720402073020142 0ustar liggesusers# use_package() guides new packages but not pre-existing ones Code use_package("withr") Message v Adding withr to 'Imports' field in DESCRIPTION. [ ] Refer to functions with `withr::fun()`. Code use_package("withr") use_package("withr", "Suggests") Message ! Package withr is already listed in 'Imports' in DESCRIPTION; no change made. # use_package() handles R versions with aplomb Code use_package("R") Condition Error in `use_dependency()`: x Set `type = "Depends"` when specifying an R version. --- Code use_package("R", type = "Depends") Condition Error in `use_dependency()`: x Specify `min_version` when `package = "R"`. 
--- Code use_package("R", type = "Depends", min_version = "3.6") Message v Adding R to 'Depends' field in DESCRIPTION. --- Code use_package("R", type = "Depends", min_version = TRUE) Message v Increasing R version to ">= 4.1" in DESCRIPTION. # use_package(type = 'Suggests') guidance w/o and w/ rlang Code use_package("withr", "Suggests") Message v Adding withr to 'Suggests' field in DESCRIPTION. [ ] Use `requireNamespace("withr", quietly = TRUE)` to test if withr is installed. [ ] Then directly refer to functions with `withr::fun()`. --- Code use_package("purrr", "Suggests") Message v Adding purrr to 'Suggests' field in DESCRIPTION. [ ] In your package code, use `rlang::is_installed("purrr")` or `rlang::check_installed("purrr")` to test if purrr is installed. [ ] Then directly refer to functions with `purrr::fun()`. usethis/tests/testthat/_snaps/github-actions.md0000644000176200001440000000253514720402065021472 0ustar liggesusers# use_github_action() allows for custom urls Code use_github_action(url = "https://raw.githubusercontent.com/r-lib/actions/v2/examples/check-full.yaml", readme = "https://github.com/r-lib/actions/blob/v2/examples/README.md") Message v Creating '.github/'. v Adding "^\\.github$" to '.Rbuildignore'. v Adding "*.html" to '.github/.gitignore'. v Creating '.github/workflows/'. v Saving "r-lib/actions/examples/check-full.yaml@v2" to '.github/workflows/R-CMD-check.yaml'. [ ] Learn more at . v Adding "R-CMD-check badge" to 'README.md'. # use_github_action() still errors in non-interactive environment Code use_github_action() Condition Error in `use_github_action()`: ! `name` is absent and must be supplied # use_github_action() accepts a ref Code read_utf8(proj_path(".github/workflows/R-CMD-check.yaml"), n = 1) Output [1] "# Workflow derived from https://github.com/r-lib/actions/tree/master/examples" # check_uses_github_actions() can throw error Code check_uses_github_actions() Condition Error in `check_uses_github_actions()`: x Cannot detect that package {TESTPKG} already uses GitHub Actions. i Do you need to run `use_github_action()`? usethis/tests/testthat/_snaps/lifecycle.md0000644000176200001440000000112114720402072020475 0ustar liggesusers# use_lifecycle() imports badges Code use_lifecycle() Message v Adding lifecycle to 'Imports' field in DESCRIPTION. [ ] Refer to functions with `lifecycle::fun()`. v Adding "@importFrom lifecycle deprecated" to 'R/{TESTPKG}-package.R'. v Writing 'NAMESPACE'. v Creating 'man/figures/'. v Copied SVG badges to 'man/figures/'. 
[ ] Add badges in documentation topics by inserting a line like this: #' `r lifecycle::badge('experimental')` #' `r lifecycle::badge('superseded')` #' `r lifecycle::badge('deprecated')` usethis/tests/testthat/_snaps/ui-legacy.md0000644000176200001440000000076614720402075020436 0ustar liggesusers# basic legacy UI actions behave as expected Code ui_line("line") Message line Code ui_todo("to do") Message * to do Code ui_done("done") Message v done Code ui_oops("oops") Message x oops Code ui_info("info") Message i info Code ui_code_block(c("x <- 1", "y <- 2")) Message x <- 1 y <- 2 Code ui_warn("a warning") Condition Warning: a warning usethis/tests/testthat/_snaps/upkeep.md0000644000176200001440000001543114720402076020044 0ustar liggesusers# tidy upkeep bullets don't change accidentally Code writeLines(tidy_upkeep_checklist()) Output ### To begin * [ ] `pr_init("upkeep-2025-01")` ### Pre-history * [ ] `usethis::use_readme_rmd()` * [ ] `usethis::use_roxygen_md()` * [ ] `usethis::use_github_links()` * [ ] `usethis::use_pkgdown_github_pages()` * [ ] `usethis::use_tidy_github_labels()` * [ ] `usethis::use_tidy_style()` * [ ] `urlchecker::url_check()` ### 2020 * [ ] `usethis::use_package_doc()` * [ ] `usethis::use_testthat(3)` * [ ] Align the names of `R/` files and `test/` files ### 2021 * [ ] Remove check environments section from `cran-comments.md` * [ ] Use lifecycle instead of artisanal deprecation messages ### 2022 * [ ] Handle and close any still-open `master` --> `main` issues * [ ] `usethis:::use_codecov_badge("OWNER/REPO")` * [ ] Update pkgdown site using instructions at * [ ] Update lifecycle badges with more accessible SVGs: `usethis::use_lifecycle()` ### 2023 * [ ] Update email addresses *@rstudio.com -> *@posit.co * [ ] Update copyright holder in DESCRIPTION: `person("Posit Software, PBC", role = c("cph", "fnd"))` * [ ] Run `devtools::document()` to re-generate package-level help topic with DESCRIPTION changes * [ ] `usethis::use_tidy_logo(); pkgdown::build_favicons(overwrite = TRUE)` * [ ] `usethis::use_tidy_coc()` * [ ] Use `pak::pak("OWNER/REPO")` in README * [ ] Consider running `usethis::use_tidy_dependencies()` and/or replace compat files with `use_standalone()` * [ ] Use cli errors or [file an issue](new) if you don't have time to do it now * [ ] `usethis::use_standalone("r-lib/rlang", "types-check")` instead of home grown argument checkers; or [file an issue](new) if you don't have time to do it now * [ ] Add alt-text to pictures, plots, etc; see https://posit.co/blog/knitr-fig-alt/ for examples ### To finish * [ ] `usethis::use_mit_license()` * [ ] `usethis::use_package("R", "Depends", "4.0")` * [ ] `usethis::use_tidy_description()` * [ ] `usethis::use_tidy_github_actions()` * [ ] `devtools::build_readme()` * [ ] [Re-publish released site](https://pkgdown.r-lib.org/dev/articles/how-to-update-released-site.html) if needed Created on 2025-01-01 with `usethis::use_tidy_upkeep_issue()`, using [usethis v1.1.0](https://usethis.r-lib.org) # tidy upkeep omits bullets present in last_upkeep Code writeLines(tidy_upkeep_checklist()) Output ### To begin * [ ] `pr_init("upkeep-2025-01")` ### 2023 * [ ] Update email addresses *@rstudio.com -> *@posit.co * [ ] Update copyright holder in DESCRIPTION: `person("Posit Software, PBC", role = c("cph", "fnd"))` * [ ] Run `devtools::document()` to re-generate package-level help topic with DESCRIPTION changes * [ ] `usethis::use_tidy_logo(); pkgdown::build_favicons(overwrite = TRUE)` * [ ] `usethis::use_tidy_coc()` * [ ] Use 
`pak::pak("OWNER/REPO")` in README * [ ] Consider running `usethis::use_tidy_dependencies()` and/or replace compat files with `use_standalone()` * [ ] Use cli errors or [file an issue](new) if you don't have time to do it now * [ ] `usethis::use_standalone("r-lib/rlang", "types-check")` instead of home grown argument checkers; or [file an issue](new) if you don't have time to do it now * [ ] Add alt-text to pictures, plots, etc; see https://posit.co/blog/knitr-fig-alt/ for examples ### To finish * [ ] `usethis::use_mit_license()` * [ ] `usethis::use_package("R", "Depends", "4.0")` * [ ] `usethis::use_tidy_description()` * [ ] `usethis::use_tidy_github_actions()` * [ ] `devtools::build_readme()` * [ ] [Re-publish released site](https://pkgdown.r-lib.org/dev/articles/how-to-update-released-site.html) if needed Created on 2025-01-01 with `usethis::use_tidy_upkeep_issue()`, using [usethis v1.1.0](https://usethis.r-lib.org) # upkeep bullets don't change accidentally Code writeLines(upkeep_checklist()) Output * [ ] `usethis::use_readme_rmd()` * [ ] `usethis::use_github_links()` * [ ] `usethis::use_pkgdown_github_pages()` * [ ] `usethis::use_tidy_description()` * [ ] `usethis::use_package_doc()` Consider letting usethis manage your `@importFrom` directives here. `usethis::use_import_from()` is handy for this. * [ ] `usethis::use_testthat()`. Learn more about testing at * [ ] Align the names of `R/` files and `test/` files for workflow happiness. The docs for `usethis::use_r()` include a helpful script. `usethis::rename_files()` may be be useful. * [ ] `usethis::use_code_of_conduct()` * [ ] Add alt-text to pictures, plots, etc; see for examples Set up or update GitHub Actions. \ Updating workflows to the latest version will often fix troublesome actions: * [ ] `usethis::use_github_action('check-standard')` Created on 2023-01-01 with `usethis::use_upkeep_issue()`, using [usethis v1.1.0](https://usethis.r-lib.org) --- Code writeLines(checklist) Output * [ ] `usethis::use_readme_rmd()` * [ ] `usethis::use_github_links()` * [ ] `usethis::use_pkgdown_github_pages()` * [ ] `usethis::use_tidy_description()` * [ ] `usethis::use_package_doc()` Consider letting usethis manage your `@importFrom` directives here. `usethis::use_import_from()` is handy for this. * [ ] `usethis::use_testthat(3)` and upgrade to 3e, [testthat 3e vignette](https://testthat.r-lib.org/articles/third-edition.html) * [ ] Align the names of `R/` files and `test/` files for workflow happiness. The docs for `usethis::use_r()` include a helpful script. `usethis::rename_files()` may be be useful. * [ ] Consider changing default branch from `master` to `main` * [ ] Remove description of test environments from `cran-comments.md`. See `usethis::use_cran_comments()`. * [ ] Add alt-text to pictures, plots, etc; see for examples Set up or update GitHub Actions. \ Updating workflows to the latest version will often fix troublesome actions: * [ ] `usethis::use_github_action('check-standard')` * [ ] `usethis::use_github_action('test-coverage')` Created on 2023-01-01 with `usethis::use_upkeep_issue()`, using [usethis v1.1.0](https://usethis.r-lib.org) usethis/tests/testthat/_snaps/vignette.md0000644000176200001440000000130514721145632020376 0ustar liggesusers# use_vignette() gives useful errors Code use_vignette() Condition Error in `use_vignette()`: ! `name` is absent but must be supplied. Code use_vignette("bad name") Condition Error in `check_vignette_name()`: x "bad name" is not a valid filename for a vignette. It must: i Start with a letter. 
i Contain only letters, numbers, '_', and '-'. # we error informatively for bad vignette extension Code check_vignette_extension("Rnw") Condition Error in `check_vignette_extension()`: x Unsupported file extension: "Rnw" i usethis can only create a vignette or article with one of these extensions: "Rmd" or "qmd". usethis/tests/testthat/_snaps/utils-github.md0000644000176200001440000001746414720402077021204 0ustar liggesusers# we understand the list of all possible configs Code all_configs() Output [1] "no_github" "ours" [3] "theirs" "maybe_ours_or_theirs" [5] "fork" "maybe_fork" [7] "fork_cannot_push_origin" "fork_upstream_is_not_origin_parent" [9] "upstream_but_origin_is_not_fork" # 'no_github' is reported correctly Code new_no_github() Message * Type = "no_github" * Host = "NA" * Config supports a pull request = FALSE * origin = * upstream = ! Neither "origin" nor "upstream" is a GitHub repo. i Read more about the GitHub remote configurations that usethis supports at: . # 'ours' is reported correctly Code new_ours() Message * Type = "ours" * Host = "https://github.com" * Config supports a pull request = TRUE * origin = "OWNER/REPO" (can push) * upstream = i "origin" is both the source and primary repo. i Read more about the GitHub remote configurations that usethis supports at: . # 'theirs' is reported correctly Code new_theirs() Message * Type = "theirs" * Host = "https://github.com" * Config supports a pull request = FALSE * origin = "OWNER/REPO" (can not push) * upstream = ! The only configured GitHub remote is "origin", which you cannot push to. i If your goal is to make a pull request, you must fork-and-clone. i `usethis::create_from_github()` can do this. i Read more about the GitHub remote configurations that usethis supports at: . # 'fork' is reported correctly Code new_fork() Message * Type = "fork" * Host = "https://github.com" * Config supports a pull request = TRUE * origin = "CONTRIBUTOR/REPO" (can push) = fork of "OWNER/REPO" * upstream = "OWNER/REPO" (can not push) i "origin" is a fork of "OWNER/REPO", which is configured as the "upstream" remote. i Read more about the GitHub remote configurations that usethis supports at: . # 'maybe_ours_or_theirs' is reported correctly Code new_maybe_ours_or_theirs() Message * Type = "maybe_ours_or_theirs" * Host = "https://github.com" * Config supports a pull request = NA * origin = "OWNER/REPO" * upstream = ! "origin" is a GitHub repo and "upstream" is either not configured or is not a GitHub repo. i We may be offline or you may need to configure a GitHub personal access token. i `usethis::gh_token_help()` can help with that. i Read more about what this GitHub remote configuration means at: . # 'maybe_fork' is reported correctly Code new_maybe_fork() Message * Type = "maybe_fork" * Host = "https://github.com" * Config supports a pull request = NA * origin = "CONTRIBUTOR/REPO" * upstream = "OWNER/REPO" ! Both "origin" and "upstream" appear to be GitHub repos. However, we can't confirm their relationship to each other (e.g., fork and fork parent) or your permissions (e.g. push access). i We may be offline or you may need to configure a GitHub personal access token. i `usethis::gh_token_help()` can help with that. i Read more about what this GitHub remote configuration means at: . # 'fork_cannot_push_origin' is reported correctly Code new_fork_cannot_push_origin() Message * Type = "fork_cannot_push_origin" * Host = "https://github.com" * Config supports a pull request = FALSE * origin = "CONTRIBUTOR/REPO" * upstream = "OWNER/REPO" ! 
The "origin" remote is a fork, but you can't push to it. i Read more about the GitHub remote configurations that usethis supports at: . # 'fork_upstream_is_not_origin_parent' is reported correctly Code new_fork_upstream_is_not_origin_parent() Message * Type = "fork_upstream_is_not_origin_parent" * Host = "https://github.com" * Config supports a pull request = FALSE * origin = "CONTRIBUTOR/REPO" (can push) = fork of "NEW_OWNER/REPO" * upstream = "OLD_OWNER/REPO" (can not push) ! The "origin" GitHub remote is a fork, but its parent is not configured as the "upstream" remote. i Read more about the GitHub remote configurations that usethis supports at: . # 'upstream_but_origin_is_not_fork' is reported correctly Code new_upstream_but_origin_is_not_fork() Message * Type = "upstream_but_origin_is_not_fork" * Host = "https://github.com" * Config supports a pull request = FALSE * origin = "CONTRIBUTOR/REPO" * upstream = "OWNER/REPO" ! Both "origin" and "upstream" are GitHub remotes, but "origin" is not a fork and, in particular, is not a fork of "upstream". i Read more about the GitHub remote configurations that usethis supports at: . # 'fork_upstream_is_not_origin_parent' is detected correctly Code stop_bad_github_remote_config(cfg) Condition Error in `stop_bad_github_remote_config()`: x Unsupported GitHub remote configuration: "fork_upstream_is_not_origin_parent" * Host = "https://github.com" * origin = "jennybc/gh" (can push) = fork of "r-lib/gh" * upstream = "r-pkgs/gh" (can push) ! The "origin" GitHub remote is a fork, but its parent is not configured as the "upstream" remote. i Read more about the GitHub remote configurations that usethis supports at: . # bad github config error Code stop_bad_github_remote_config(new_fork_upstream_is_not_origin_parent()) Condition Error in `stop_bad_github_remote_config()`: x Unsupported GitHub remote configuration: "fork_upstream_is_not_origin_parent" * Host = "https://github.com" * origin = "CONTRIBUTOR/REPO" (can push) = fork of "NEW_OWNER/REPO" * upstream = "OLD_OWNER/REPO" (can not push) ! The "origin" GitHub remote is a fork, but its parent is not configured as the "upstream" remote. i Read more about the GitHub remote configurations that usethis supports at: . # maybe bad github config error Code stop_maybe_github_remote_config(new_maybe_fork()) Condition Error in `stop_maybe_github_remote_config()`: x Pull request functions can't work with GitHub remote configuration: "maybe_fork". i The most likely problem is that we aren't discovering your GitHub personal access token. * Host = "https://github.com" * origin = "CONTRIBUTOR/REPO" * upstream = "OWNER/REPO" ! Both "origin" and "upstream" appear to be GitHub repos. However, we can't confirm their relationship to each other (e.g., fork and fork parent) or your permissions (e.g. push access). i We may be offline or you may need to configure a GitHub personal access token. i `usethis::gh_token_help()` can help with that. i Read more about what this GitHub remote configuration means at: . usethis/tests/testthat/_snaps/news.md0000644000176200001440000000124614720402073017523 0ustar liggesusers# use_news_md() sets (development version)/'Initial submission' in new pkg Code writeLines(read_utf8(proj_path("NEWS.md"))) Output # {TESTPKG} (development version) * Initial CRAN submission. # use_news_md() sets bullet to 'Added a NEWS.md file...' when on CRAN Code writeLines(read_utf8(proj_path("NEWS.md"))) Output # {TESTPKG} (development version) * Added a `NEWS.md` file to track changes to the package. 
# use_news_md() sets version number when 'production version' Code writeLines(read_utf8(proj_path("NEWS.md"))) Output # {TESTPKG} 0.2.0 * Initial CRAN submission. usethis/tests/testthat/_snaps/proj-desc.md0000644000176200001440000000117514720402073020436 0ustar liggesusers# proj_desc_field_update() only messages when adding Code proj_desc_field_update("Config/Needs/foofy", "alfa", append = TRUE) Message v Adding "alfa" to 'Config/Needs/foofy'. Code proj_desc_field_update("Config/Needs/foofy", "alfa", append = TRUE) proj_desc_field_update("Config/Needs/foofy", "bravo", append = TRUE) Message v Adding "bravo" to 'Config/Needs/foofy'. # proj_desc_field_update() works with multiple values Code proj_desc_field_update("Config/Needs/foofy", c("alfa", "bravo"), append = TRUE) Message v Adding "alfa" and "bravo" to 'Config/Needs/foofy'. usethis/tests/testthat/_snaps/git-default-branch.md0000644000176200001440000000144614720402071022207 0ustar liggesusers# git_default_branch_rename() surfaces files that smell fishy Code git_default_branch_rename() Message i Local branch "master" appears to play the role of the default branch. v Moving local "master" branch to "main". [ ] Be sure to update files that refer to the default branch by name. Consider searching within your project for "master". x This GitHub Action file doesn't mention the new default branch "main": '.github/workflows/blah.yml' x Some badges appear to refer to the old default branch "master". [ ] Check and correct, if needed, in this file: 'README.md' x The bookdown configuration file may refer to the old default branch "master". [ ] Check and correct, if needed, in this file: 'whatever/foo/_bookdown.yaml' usethis/tests/testthat/_snaps/utils-ui.md0000644000176200001440000002461314720402077020331 0ustar liggesusers# ui_bullets() look as expected [plain] Code ui_bullets(c(`_` = "todo", v = "done", x = "oops", i = "info", "noindent", ` ` = "indent", `*` = "bullet", `>` = "arrow", `!` = "warning")) Message [ ] todo v done x oops i info noindent indent * bullet > arrow ! warning # ui_bullets() look as expected [ansi] Code ui_bullets(c(`_` = "todo", v = "done", x = "oops", i = "info", "noindent", ` ` = "indent", `*` = "bullet", `>` = "arrow", `!` = "warning")) Message [ ] todo v done x oops i info noindent indent * bullet > arrow ! warning # ui_bullets() look as expected [unicode] Code ui_bullets(c(`_` = "todo", v = "done", x = "oops", i = "info", "noindent", ` ` = "indent", `*` = "bullet", `>` = "arrow", `!` = "warning")) Message ☠todo ✔ done ✖ oops ℹ info noindent indent • bullet → arrow ! warning # ui_bullets() look as expected [fancy] Code ui_bullets(c(`_` = "todo", v = "done", x = "oops", i = "info", "noindent", ` ` = "indent", `*` = "bullet", `>` = "arrow", `!` = "warning")) Message ☠todo ✔ done ✖ oops ℹ info noindent indent • bullet → arrow ! warning # ui_bullets() does glue interpolation and inline markup [plain] Code ui_bullets(c(i = "Hello, {x}!", v = "Updated the {.field BugReports} field", x = "Scary {.code code} or {.fun function}")) Message i Hello, world! v Updated the 'BugReports' field x Scary `code` or `function()` # ui_bullets() does glue interpolation and inline markup [ansi] Code ui_bullets(c(i = "Hello, {x}!", v = "Updated the {.field BugReports} field", x = "Scary {.code code} or {.fun function}")) Message i Hello, world! 
v Updated the BugReports field x Scary `code` or `function()` # ui_bullets() does glue interpolation and inline markup [unicode] Code ui_bullets(c(i = "Hello, {x}!", v = "Updated the {.field BugReports} field", x = "Scary {.code code} or {.fun function}")) Message ℹ Hello, world! ✔ Updated the 'BugReports' field ✖ Scary `code` or `function()` # ui_bullets() does glue interpolation and inline markup [fancy] Code ui_bullets(c(i = "Hello, {x}!", v = "Updated the {.field BugReports} field", x = "Scary {.code code} or {.fun function}")) Message ℹ Hello, world! ✔ Updated the BugReports field ✖ Scary `code` or `function()` # ui_abort() defaults to 'x' for first bullet Code ui_abort("no explicit bullet") Condition Error: x no explicit bullet # ui_abort() can take explicit first bullet Code ui_abort(c(v = "success bullet")) Condition Error: v success bullet # ui_abort() defaults to 'i' for non-first bullet Code ui_abort(c("oops", ` ` = "space bullet", "info bullet", v = "success bullet")) Condition Error: x oops space bullet i info bullet v success bullet # ui_code_snippet() with scalar input [plain] Code ui_code_snippet( "\n options(\n warnPartialMatchArgs = TRUE,\n warnPartialMatchDollar = TRUE,\n warnPartialMatchAttr = TRUE\n )") Message options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE ) # ui_code_snippet() with scalar input [ansi] Code ui_code_snippet( "\n options(\n warnPartialMatchArgs = TRUE,\n warnPartialMatchDollar = TRUE,\n warnPartialMatchAttr = TRUE\n )") Message options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE ) # ui_code_snippet() with vector input [plain] Code ui_code_snippet(c("options(", " warnPartialMatchArgs = TRUE,", " warnPartialMatchDollar = TRUE,", " warnPartialMatchAttr = TRUE", ")")) Message options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE ) # ui_code_snippet() with vector input [ansi] Code ui_code_snippet(c("options(", " warnPartialMatchArgs = TRUE,", " warnPartialMatchDollar = TRUE,", " warnPartialMatchAttr = TRUE", ")")) Message options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE ) # ui_code_snippet() when language is not R [plain] Code ui_code_snippet("#include <{h}>", language = "") Message #include # ui_code_snippet() when language is not R [ansi] Code ui_code_snippet("#include <{h}>", language = "") Message #include # ui_code_snippet() can interpolate [plain] Code ui_code_snippet("if (1) {true_val} else {false_val}") Message if (1) TRUE else 'FALSE' # ui_code_snippet() can interpolate [ansi] Code ui_code_snippet("if (1) {true_val} else {false_val}") Message if (1) TRUE else 'FALSE' # ui_code_snippet() can NOT interpolate [plain] Code ui_code_snippet("foo <- function(x){x}", interpolate = FALSE) Message foo <- function(x){x} Code ui_code_snippet("foo <- function(x){{x}}", interpolate = TRUE) Message foo <- function(x){x} # ui_code_snippet() can NOT interpolate [ansi] Code ui_code_snippet("foo <- function(x){x}", interpolate = FALSE) Message foo <- function(x){x} Code ui_code_snippet("foo <- function(x){{x}}", interpolate = TRUE) Message foo <- function(x){x} # bulletize() works Code ui_bullets(bulletize(letters)) Message * a * b * c * d * e ... and 21 more --- Code ui_bullets(bulletize(letters, bullet = "x")) Message x a x b x c x d x e ... and 21 more --- Code ui_bullets(bulletize(letters, n_show = 2)) Message * a * b ... 
and 24 more --- Code ui_bullets(bulletize(letters[1:6])) Message * a * b * c * d * e * f --- Code ui_bullets(bulletize(letters[1:7])) Message * a * b * c * d * e * f * g --- Code ui_bullets(bulletize(letters[1:8])) Message * a * b * c * d * e ... and 3 more --- Code ui_bullets(bulletize(letters[1:6], n_fudge = 0)) Message * a * b * c * d * e ... and 1 more --- Code ui_bullets(bulletize(letters[1:8], n_fudge = 3)) Message * a * b * c * d * e * f * g * h # ui_special() works [plain] Code cli::cli_text(ui_special()) Message --- Code cli::cli_text(ui_special("whatever")) Message # ui_special() works [ansi] Code cli::cli_text(ui_special()) Message  --- Code cli::cli_text(ui_special("whatever")) Message  # kv_line() looks as expected in basic use [plain] Code kv_line("CHARACTER", "VALUE") Message * CHARACTER: "VALUE" Code kv_line("NUMBER", 1) Message * NUMBER: 1 Code kv_line("LOGICAL", TRUE) Message * LOGICAL: TRUE # kv_line() looks as expected in basic use [fancy] Code kv_line("CHARACTER", "VALUE") Message • CHARACTER: "VALUE" Code kv_line("NUMBER", 1) Message • NUMBER: 1 Code kv_line("LOGICAL", TRUE) Message • LOGICAL: TRUE # kv_line() can interpolate and style inline in key [plain] Code kv_line("Let's reveal {.field {field}}", "whatever") Message * Let's reveal 'SOME_FIELD': "whatever" # kv_line() can interpolate and style inline in key [fancy] Code kv_line("Let's reveal {.field {field}}", "whatever") Message • Let's reveal SOME_FIELD: "whatever" # kv_line() can treat value in different ways [plain] Code kv_line("Key", value) Message * Key: "some value" Code kv_line("Something we don't have", NULL) Message * Something we don't have: Code kv_line("Key", ui_special("discovered")) Message * Key: Code kv_line("Key", "something {.emph important}") Message * Key: "something {.emph important}" Code kv_line("Key", I("something {.emph important}")) Message * Key: something important Code kv_line("Key", I("something {.emph {adjective}}")) Message * Key: something great Code kv_line("Interesting file", I("{.url {url}}")) Message * Interesting file: # kv_line() can treat value in different ways [fancy] Code kv_line("Key", value) Message • Key: "some value" Code kv_line("Something we don't have", NULL) Message • Something we don't have:  Code kv_line("Key", ui_special("discovered")) Message • Key:  Code kv_line("Key", "something {.emph important}") Message • Key: "something {.emph important}" Code kv_line("Key", I("something {.emph important}")) Message • Key: something important Code kv_line("Key", I("something {.emph {adjective}}")) Message • Key: something great Code kv_line("Interesting file", I("{.url {url}}")) Message • Interesting file:  usethis/tests/testthat/_snaps/tidyverse.md0000644000176200001440000000123114720402075020561 0ustar liggesusers# use_tidy_dependencies() isn't overly informative Code use_tidy_dependencies() Message v Adding rlang to 'Imports' field in DESCRIPTION. v Adding lifecycle to 'Imports' field in DESCRIPTION. v Adding cli to 'Imports' field in DESCRIPTION. v Adding glue to 'Imports' field in DESCRIPTION. v Adding withr to 'Imports' field in DESCRIPTION. v Adding "@import rlang" to 'R/{TESTPKG}-package.R'. v Adding "@importFrom glue glue" to 'R/{TESTPKG}-package.R'. v Adding "@importFrom lifecycle deprecated" to 'R/{TESTPKG}-package.R'. v Writing 'NAMESPACE'. v Writing 'R/import-standalone-purrr.R'. 
usethis/tests/testthat/_snaps/use_standalone.md0000644000176200001440000001205414720402076021555 0ustar liggesusers# standalone_header() works with various inputs Code standalone_header("OWNER/REPO", "R/standalone-foo.R") Output [1] "# Standalone file: do not edit by hand" [2] "# Source: https://github.com/OWNER/REPO/blob/HEAD/R/standalone-foo.R" [3] "# Generated by: usethis::use_standalone(\"OWNER/REPO\", \"foo\")" [4] "# ----------------------------------------------------------------------" [5] "#" --- Code standalone_header("OWNER/REPO", "R/standalone-foo.R", ref = "blah") Output [1] "# Standalone file: do not edit by hand" [2] "# Source: https://github.com/OWNER/REPO/blob/blah/R/standalone-foo.R" [3] "# Generated by: usethis::use_standalone(\"OWNER/REPO\", \"foo\", ref = \"blah\")" [4] "# ----------------------------------------------------------------------" [5] "#" --- Code standalone_header("OWNER/REPO", "R/standalone-foo.R", host = "https://github.com") Output [1] "# Standalone file: do not edit by hand" [2] "# Source: https://github.com/OWNER/REPO/blob/HEAD/R/standalone-foo.R" [3] "# Generated by: usethis::use_standalone(\"OWNER/REPO\", \"foo\")" [4] "# ----------------------------------------------------------------------" [5] "#" --- Code standalone_header("OWNER/REPO", "R/standalone-foo.R", host = "https://github.acme.com") Output [1] "# Standalone file: do not edit by hand" [2] "# Source: https://github.acme.com/OWNER/REPO/blob/HEAD/R/standalone-foo.R" [3] "# Generated by: usethis::use_standalone(\"OWNER/REPO\", \"foo\", host = \"https://github.acme.com\")" [4] "# ----------------------------------------------------------------------" [5] "#" --- Code standalone_header("OWNER/REPO", "R/standalone-foo.R", ref = "blah", host = "https://github.com") Output [1] "# Standalone file: do not edit by hand" [2] "# Source: https://github.com/OWNER/REPO/blob/blah/R/standalone-foo.R" [3] "# Generated by: usethis::use_standalone(\"OWNER/REPO\", \"foo\", ref = \"blah\")" [4] "# ----------------------------------------------------------------------" [5] "#" --- Code standalone_header("OWNER/REPO", "R/standalone-foo.R", ref = "blah", host = "https://github.acme.com") Output [1] "# Standalone file: do not edit by hand" [2] "# Source: https://github.acme.com/OWNER/REPO/blob/blah/R/standalone-foo.R" [3] "# Generated by: usethis::use_standalone(\"OWNER/REPO\", \"foo\", ref = \"blah\", host = \"https://github.acme.com\")" [4] "# ----------------------------------------------------------------------" [5] "#" # can offer choices Code standalone_choose("tidyverse/forcats", ref = "v1.0.0") Condition Error: ! No standalone files found in tidyverse/forcats. Code standalone_choose("r-lib/rlang", ref = "4670cb233ecc8d11") Condition Error: ! `file` is absent, but must be supplied. i Possible options are cli, downstream-deps, lazyeval, lifecycle, linked-version, obj-type, purrr, rlang, s3-register, sizes, types-check, vctrs, or zeallot. # can extract imports Code extract_imports("# imports: rlang (== 1.0.0)") Condition Error in `extract_imports()`: ! Version specification must use `>=`. Code extract_imports("# imports: rlang (>= 1.0.0), purrr") Condition Error in `extract_imports()`: ! Version field can't contain comma. i Do you need to wrap in a list? Code extract_imports("# imports: foo (>=0.0.0)") Condition Error in `extract_imports()`: ! Can't parse version `foo (>=0.0.0)` in `imports:` field. i Example of expected version format: `rlang (>= 1.0.0)`. 
# errors on malformed dependencies Code standalone_dependencies(c(), "test.R") Condition Error: ! Can't find yaml metadata in 'test.R'. Code standalone_dependencies(c("# ---", "# dependencies: 1", "# ---"), "test.R") Condition Error: ! Invalid dependencies specification in 'test.R'. usethis/tests/testthat/_snaps/author.md0000644000176200001440000000234414720402065020052 0ustar liggesusers# Legacy author fields are challenged Code challenge_legacy_author_fields() Message x Found legacy 'Author' and/or 'Maintainer' field in DESCRIPTION. usethis only supports modification of the 'Authors@R' field. i We recommend one of these paths forward: [ ] Delete the legacy fields and rebuild with `use_author()`; or [ ] Convert to 'Authors@R' with `desc::desc_coerce_authors_at_r()`, then delete the legacy fields. Condition Error in `ui_yep()`: x User input required, but session is not interactive. i Query: "Do you want to cancel this operation and sort that out first?" # Decline to tweak an existing author Code use_author("Jennifer", "Bryan", role = "cph") Condition Error in `check_author_is_novel()`: x "Jennifer Bryan" already appears in 'Authors@R'. Please make the desired change directly in DESCRIPTION or call the desc package directly. # Placeholder author is challenged Code use_author("Charlie", "Brown") Message v Adding to 'Authors@R' in DESCRIPTION: Charlie Brown [ctb] i 'Authors@R' appears to include a placeholder author: First Last [aut, cre] usethis/tests/testthat/_snaps/usethis-deprecated.md0000644000176200001440000000056514720402076022337 0ustar liggesusers# use_tidy_eval() is deprecated Code use_tidy_eval() Condition Error: ! `use_tidy_eval()` was deprecated in usethis 2.2.0 and is now defunct. i There is no longer a need to systematically import and/or re-export functions i Instead import functions as needed, with e.g.: i usethis::use_import_from("rlang", c(".data", ".env")) usethis/tests/testthat/_snaps/release.md0000644000176200001440000001470314720402067020174 0ustar liggesusers# release bullets don't change accidentally Code writeLines(release_checklist("0.1.0", on_cran = FALSE)) Output First release: * [ ] `usethis::use_news_md()` * [ ] `usethis::use_cran_comments()` * [ ] Update (aspirational) install instructions in README * [ ] Proofread `Title:` and `Description:` * [ ] Check that all exported functions have `@return` and `@examples` * [ ] Check that `Authors@R:` includes a copyright holder (role 'cph') * [ ] Check [licensing of included files](https://r-pkgs.org/license.html#sec-code-you-bundle) * [ ] Review Prepare for release: * [ ] `git pull` * [ ] `usethis::use_github_links()` * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `git push` * [ ] Draft blog post Submit to CRAN: * [ ] `usethis::use_version('minor')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... 
* [ ] Accepted :tada: * [ ] Finish & publish blog post * [ ] Add link to blog post in pkgdown news menu * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version(push = TRUE)` * [ ] `usethis::use_news_md()` * [ ] Share on social media --- Code writeLines(release_checklist("0.0.1", on_cran = TRUE)) Output Prepare for release: * [ ] `git pull` * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] `usethis::use_news_md()` * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `usethis::use_github_links()` * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `revdepcheck::revdep_check(num_workers = 4)` * [ ] Update `cran-comments.md` * [ ] `git push` Submit to CRAN: * [ ] `usethis::use_version('patch')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... * [ ] Accepted :tada: * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version(push = TRUE)` * [ ] `usethis::use_news_md()` --- Code writeLines(release_checklist("1.0.0", on_cran = TRUE)) Output Prepare for release: * [ ] `git pull` * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] `usethis::use_news_md()` * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `usethis::use_github_links()` * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `revdepcheck::revdep_check(num_workers = 4)` * [ ] Update `cran-comments.md` * [ ] `git push` * [ ] Draft blog post Submit to CRAN: * [ ] `usethis::use_version('major')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... * [ ] Accepted :tada: * [ ] Finish & publish blog post * [ ] Add link to blog post in pkgdown news menu * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version(push = TRUE)` * [ ] `usethis::use_news_md()` * [ ] Share on social media # construct correct revdep bullet Code release_revdepcheck(on_cran = FALSE) Output NULL Code release_revdepcheck(on_cran = TRUE, is_posit_pkg = FALSE) Output [1] "* [ ] `revdepcheck::revdep_check(num_workers = 4)`" Code release_revdepcheck(on_cran = TRUE, is_posit_pkg = TRUE) Output [1] "* [ ] `revdepcheck::cloud_check()`" Code release_revdepcheck(on_cran = TRUE, is_posit_pkg = TRUE, env = env) Output [1] "* [ ] `revdepcheck::cloud_check(extra_revdeps = c(\"waldo\", \"testthat\"))`" # RStudio-ness detection works Code writeLines(release_checklist("1.0.0", on_cran = TRUE)) Output Prepare for release: * [ ] `git pull` * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] `usethis::use_news_md()` * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `usethis::use_github_links()` * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `revdepcheck::cloud_check()` * [ ] Update `cran-comments.md` * [ ] `git push` * [ ] Draft blog post * [ ] Slack link to draft blog in #open-source-comms Submit to CRAN: * [ ] `usethis::use_version('major')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... 
* [ ] Accepted :tada: * [ ] Finish & publish blog post * [ ] Add link to blog post in pkgdown news menu * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version(push = TRUE)` * [ ] `usethis::use_news_md()` * [ ] Share on social media # no revdep release bullets when there are no revdeps Code writeLines(release_checklist("1.0.0", on_cran = TRUE)) Output Prepare for release: * [ ] `git pull` * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] `usethis::use_news_md()` * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `usethis::use_github_links()` * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] Update `cran-comments.md` * [ ] `git push` * [ ] Draft blog post Submit to CRAN: * [ ] `usethis::use_version('major')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... * [ ] Accepted :tada: * [ ] Finish & publish blog post * [ ] Add link to blog post in pkgdown news menu * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version(push = TRUE)` * [ ] `usethis::use_news_md()` * [ ] Share on social media usethis/tests/testthat/_snaps/rename-files.md0000644000176200001440000000023014720402074021107 0ustar liggesusers# renames src/ files Code rename_files("foo", "bar") Message v Moving 'src/foo.c' and 'src/foo.h' to 'src/bar.c' and 'src/bar.h'. usethis/tests/testthat/_snaps/r.md0000644000176200001440000000323414720402074017010 0ustar liggesusers# use_test_helper() creates a helper file Code use_test_helper(open = FALSE) Condition Error in `use_test_helper()`: x Your package must use testthat to use a helper file. Call `usethis::use_testthat()` to set up testthat. --- Code use_test_helper("foo", open = FALSE) Message i Test helper files are executed at the start of all automated test runs. i `devtools::load_all()` also sources test helper files. [ ] Edit 'tests/testthat/helper-foo.R'. # compute_name() errors if no RStudio Code compute_name() Condition Error: ! `name` is absent but must be specified. # compute_name() validates its inputs Code compute_name("foo.c") Condition Error: ! `name` must have extension "R", not "c". Code compute_name("R/foo.c") Condition Error: ! `name` must be a file name without directory. Code compute_name(c("a", "b")) Condition Error: ! `name` must be a single string Code compute_name("") Condition Error: ! `name` must not be an empty string Code compute_name("****") Condition Error: ! `name` ("****") must be a valid file name. i A valid file name consists of only ASCII letters, numbers, '-', and '_'. # compute_active_name() errors if no files open Code compute_active_name(NULL) Condition Error: ! No file is open in RStudio. i Please specify `name`. # compute_active_name() checks directory Code compute_active_name("foo/bar.R") Condition Error: ! Open file must be code, test, or snapshot. usethis/tests/testthat/_snaps/readme.md0000644000176200001440000001056214720402074020006 0ustar liggesusers# use_readme_md() has expected form for a non-GitHub package Code writeLines(read_utf8("README.md")) Output # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} like so: ``` r # FILL THIS IN! HOW CAN PEOPLE INSTALL YOUR DEV PACKAGE? 
``` ## Example This is a basic example which shows you how to solve a common problem: ``` r library({TESTPKG}) ## basic example code ``` # use_readme_md() has expected form for a GitHub package Code writeLines(read_utf8("README.md")) Output # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} from [GitHub](https://github.com/) with: ``` r # install.packages("pak") pak::pak("OWNER/TESTPKG") ``` ## Example This is a basic example which shows you how to solve a common problem: ``` r library({TESTPKG}) ## basic example code ``` # use_readme_rmd() has expected form for a non-GitHub package Code writeLines(read_utf8("README.Rmd")) Output --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>", fig.path = "man/figures/README-", out.width = "100%" ) ``` # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} like so: ``` r # FILL THIS IN! HOW CAN PEOPLE INSTALL YOUR DEV PACKAGE? ``` ## Example This is a basic example which shows you how to solve a common problem: ```{r example} library({TESTPKG}) ## basic example code ``` What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub and CRAN. # use_readme_rmd() has expected form for a GitHub package Code writeLines(read_utf8("README.Rmd")) Output --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>", fig.path = "man/figures/README-", out.width = "100%" ) ``` # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} from [GitHub](https://github.com/) with: ``` r # install.packages("pak") pak::pak("OWNER/TESTPKG") ``` ## Example This is a basic example which shows you how to solve a common problem: ```{r example} library({TESTPKG}) ## basic example code ``` What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub and CRAN. usethis/tests/testthat/_snaps/github.md0000644000176200001440000000040214720402071020020 0ustar liggesusers# use_github_links() aborts or appends URLs when it should Code use_github_links(overwrite = FALSE) Condition Error in `proj_desc_field_update()`: x 'URL' has a different value in DESCRIPTION. Use `overwrite = TRUE` to overwrite. 
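The README scaffolding shown above is generated by `use_readme_md()` and `use_readme_rmd()`. A minimal sketch of the usual workflow, using only exported functions and run from the root of an active package project; `overwrite = TRUE` mirrors the error captured in the github.md snapshot:

usethis::use_readme_rmd()   # drops the README.Rmd template shown above
# ...edit the Installation and Example sections by hand...
devtools::build_readme()    # re-renders README.Rmd into README.md

# If DESCRIPTION already has a different 'URL' value, use_github_links()
# refuses to touch it unless explicitly allowed to:
usethis::use_github_links(overwrite = TRUE)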
usethis/tests/testthat/_snaps/pipe.md0000644000176200001440000000021514720402073017477 0ustar liggesusers# use_pipe(export = FALSE) adds roxygen to package doc Code roxygen_ns_show() Output [1] "#' @importFrom magrittr %>%" usethis/tests/testthat/_snaps/proj.md0000644000176200001440000000141314720402074017516 0ustar liggesusers# check_is_package() can reveal who's asking Code check_is_package("foo()") Condition Error in `check_is_package()`: i foo() (`?usethis::foo`) is designed to work with packages. x Project "{TESTPROJ}" is not an R package. # proj_path() errors with absolute paths Code proj_path(c("/a", "b", "/c")) Condition Error in `proj_path()`: x Paths must be relative to the active project, not absolute. --- Code proj_path("/a", "b", "/c") Condition Error in `proj_path()`: x Paths must be relative to the active project, not absolute. --- Code proj_path("/a", c("b", "/c")) Condition Error in `proj_path()`: x Paths must be relative to the active project, not absolute. usethis/tests/testthat/_snaps/rstudio.md0000644000176200001440000000046314720402075020242 0ustar liggesusers# we error if there isn't exactly one Rproj files Code rproj_path(path) Condition Error: ! "test" is not an RStudio Project. --- Code rproj_path(path) Condition Error: ! "test" must contain a single .Rproj file. i Found 'a.Rproj' and 'b.Rproj'. usethis/tests/testthat/_snaps/utils.md0000644000176200001440000000103414720402077017706 0ustar liggesusers# check_is_named_list() works Code user_facing_function(NULL) Condition Error in `check_is_named_list()`: x `somevar` must be a list, not NULL. --- Code user_facing_function(c(a = "a", b = "b")) Condition Error in `check_is_named_list()`: x `somevar` must be a list, not a character vector. --- Code user_facing_function(list("a", b = 2)) Condition Error in `check_is_named_list()`: x Names of `somevar` must be non-missing, non-empty, and non-duplicated. usethis/tests/testthat/_snaps/coverage.md0000644000176200001440000000100714720402067020340 0ustar liggesusers# we use specific URLs in a codecov badge Code use_codecov_badge("OWNER/REPO") Message ! Can't find a README for the current project. i See `usethis::use_readme_rmd()` for help creating this file. i Badge link will only be printed to screen. [ ] Copy and paste the following lines into 'README': [![Codecov test coverage](https://codecov.io/gh/OWNER/REPO/graph/badge.svg)](https://app.codecov.io/gh/OWNER/REPO) usethis/tests/testthat/_snaps/cpp11.md0000644000176200001440000000026314720402067017474 0ustar liggesusers# check_cpp_register_deps is silent if all installed, emits todo if not Code check_cpp_register_deps() Message [ ] Now install decor and vctrs to use cpp11. usethis/tests/testthat/_snaps/write.md0000644000176200001440000000030014720402100017656 0ustar liggesusers# write_union() messaging is correct with weird working directory Code write_union(proj_path("somefile"), letters[4:6]) Message v Adding "d", "e", and "f" to 'somefile'. usethis/tests/testthat/_snaps/badge.md0000644000176200001440000000174614720402065017617 0ustar liggesusers# use_lifecycle_badge() handles bad and good input Code use_lifecycle_badge("eperimental") Condition Error in `use_lifecycle_badge()`: ! `stage` must be one of "experimental", "stable", "superseded", or "deprecated", not "eperimental". i Did you mean "experimental"? # use_posit_cloud_badge() handles bad and good input Code use_posit_cloud_badge() Condition Error in `use_posit_cloud_badge()`: ! `url` must be a valid name, not absent. 
--- Code use_posit_cloud_badge(123) Condition Error in `use_posit_cloud_badge()`: ! `url` must be a valid name, not the number 123. --- Code use_posit_cloud_badge("http://posit.cloud/123") Condition Error in `use_posit_cloud_badge()`: x `usethis::use_posit_cloud_badge()` requires a link to an existing Posit Cloud project of the form "https://posit.cloud/content/" or "https://posit.cloud/spaces//content/". usethis/tests/testthat/test-test.R0000644000176200001440000000051014651000165017011 0ustar liggesuserstest_that("check_edition() validates inputs", { expect_error(check_edition(20), "not available") expect_error(check_edition("x"), "single number") expect_equal(check_edition(1.5), 1) if (packageVersion("testthat") >= "2.99") { expect_equal(check_edition(), 3) } else { expect_equal(check_edition(), 2) } }) usethis/tests/testthat/test-cpp11.R0000644000176200001440000000223614717524721017001 0ustar liggesuserstest_that("use_cpp11() requires a package", { create_local_project() local_check_installed() expect_usethis_error(use_cpp11(), "not an R package") }) test_that("use_cpp11() creates files/dirs, edits DESCRIPTION and .gitignore", { create_local_package() use_roxygen_md() use_package_doc() # needed for use_cpp11() local_interactive(FALSE) local_check_installed() local_mocked_bindings(check_cpp_register_deps = function() invisible()) use_cpp11() expect_match(desc::desc_get("LinkingTo"), "cpp11") expect_proj_dir("src") expect_proj_file("src", "code.cpp") ignores <- read_utf8(proj_path("src", ".gitignore")) expect_contains(ignores, c("*.o", "*.so", "*.dll")) }) test_that("check_cpp_register_deps is silent if all installed, emits todo if not", { withr::local_options(list(usethis.quiet = FALSE)) local_mocked_bindings( get_cpp_register_deps = function() c("brio", "decor", "vctrs"), is_installed = function(package) TRUE ) expect_no_message( check_cpp_register_deps() ) local_mocked_bindings( is_installed = function(package) identical(package, "brio") ) expect_snapshot( check_cpp_register_deps() ) }) usethis/tests/testthat/test-logo.R0000644000176200001440000000203014651514262017001 0ustar liggesuserstest_that("use_logo() doesn't error with no README", { skip_if_not_installed("magick") skip_on_os("solaris") create_local_package() img <- magick::image_write(magick::image_read("logo:"), "logo.png") expect_no_error(use_logo("logo.png")) }) test_that("use_logo() shows a clickable path with README", { skip_if_not_installed("magick") skip_on_os("solaris") create_local_package() use_readme_md() img <- magick::image_write(magick::image_read("logo:"), "logo.png") withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_logo("logo.png"), transform = scrub_testpkg) }) # https://github.com/r-lib/usethis/issues/1999 test_that("use_logo() writes a file in lowercase and it knows that", { skip_if_not_installed("magick") skip_on_os("solaris") create_local_package() img <- magick::image_write(magick::image_read("logo:"), "LoGo.PNG") withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot(use_logo("LoGo.PNG"), transform = scrub_testpkg) expect_proj_file("man", "figures", "logo.png") }) usethis/tests/testthat/test-rename-files.R0000644000176200001440000000475614717524721020435 0ustar liggesuserstest_that("checks uncommitted files", { create_local_package() expect_error(rename_files("foo", "bar"), class = "usethis_error") git_init() use_r("foo", open = FALSE) expect_error( rename_files("foo", "bar"), "uncommitted changes", class = "usethis_error" ) }) test_that("renames R and test and snapshot files", 
{ create_local_package() local_mocked_bindings(challenge_uncommitted_changes = function(...) invisible()) git_init() use_r("foo", open = FALSE) rename_files("foo", "bar") expect_proj_file("R/bar.R") use_test("foo", open = FALSE) rename_files("foo", "bar") expect_proj_file("tests/testthat/test-bar.R") dir_create(proj_path("tests", "testthat", "_snaps")) write_utf8(proj_path("tests", "testthat", "_snaps", "foo.md"), "abc") rename_files("foo", "bar") expect_proj_file("tests/testthat/_snaps/bar.md") }) test_that("renames src/ files", { create_local_package() local_mocked_bindings(challenge_uncommitted_changes = function(...) invisible()) git_init() use_src() file_create(proj_path("src/foo.c")) file_create(proj_path("src/foo.h")) withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot({ rename_files("foo", "bar") }) expect_proj_file("src/bar.c") expect_proj_file("src/bar.h") }) test_that("strips context from test file", { create_local_package() local_mocked_bindings(challenge_uncommitted_changes = function(...) invisible()) git_init() use_testthat() write_utf8( proj_path("tests", "testthat", "test-foo.R"), c( "context('bar')", "", "a <- 1" ) ) rename_files("foo", "bar") lines <- read_utf8(proj_path("tests", "testthat", "test-bar.R")) expect_equal(lines, "a <- 1") }) test_that("rename paths in test file", { create_local_package() local_mocked_bindings(challenge_uncommitted_changes = function(...) invisible()) git_init() use_testthat() write_utf8(proj_path("tests", "testthat", "test-foo.txt"), "10") write_utf8(proj_path("tests", "testthat", "test-foo.R"), "test-foo.txt") rename_files("foo", "bar") expect_proj_file("tests/testthat/test-bar.txt") lines <- read_utf8(proj_path("tests", "testthat", "test-bar.R")) expect_equal(lines, "test-bar.txt") }) test_that("does not remove non-R dots in filename", { create_local_package() local_mocked_bindings(challenge_uncommitted_changes = function(...) 
invisible()) git_init() file_create(proj_path("R/foo.bar.R")) rename_files("foo.bar", "baz.qux") expect_proj_file("R/baz.qux.R") }) usethis/tests/testthat/test-tibble.R0000644000176200001440000000117614651514262017314 0ustar liggesuserstest_that("use_tibble() requires a package", { create_local_project() expect_usethis_error(use_tibble(), "not an R package") }) test_that("use_tibble() Imports tibble and imports tibble::tibble()", { create_local_package() withr::local_options(list(usethis.quiet = FALSE)) local_roxygen_update_ns() local_check_installed() ui_silence(use_package_doc()) local_check_fun_exists() expect_snapshot( use_tibble(), transform = scrub_testpkg ) expect_match(proj_desc()$get("Imports"), "tibble") pkg_doc <- readLines(package_doc_path()) expect_match(pkg_doc, "#' @importFrom tibble tibble", all = FALSE) }) usethis/tests/testthat/test-tidyverse.R0000644000176200001440000000477414717524762020111 0ustar liggesuserstest_that("use_tidy_description() alphabetises dependencies and remotes", { pkg <- create_local_package() use_package("usethis") use_package("desc") use_package("withr", "Suggests") use_package("gh", "Suggests") desc::desc_set_remotes(c("r-lib/styler", "jimhester/lintr")) use_tidy_description() desc <- read_utf8(proj_path("DESCRIPTION")) expect_gt(grep("usethis", desc), grep("desc", desc)) expect_gt(grep("withr", desc), grep("gh", desc)) expect_gt(grep("r\\-lib\\/styler", desc), grep("jimhester\\/lintr", desc)) }) test_that("use_tidy_dependencies() isn't overly informative", { skip_if_offline("github.com") create_local_package() use_package_doc(open = FALSE) withr::local_options(usethis.quiet = FALSE, cli.width = Inf) expect_snapshot( use_tidy_dependencies(), transform = scrub_testpkg ) }) test_that("use_tidy_GITHUB-STUFF() adds and Rbuildignores files", { local_interactive(FALSE) local_target_repo_spec("OWNER/REPO") create_local_package() use_git() use_tidy_contributing() use_tidy_support() use_tidy_issue_template() use_tidy_coc() expect_proj_file(".github/CONTRIBUTING.md") expect_proj_file(".github/ISSUE_TEMPLATE/issue_template.md") expect_proj_file(".github/SUPPORT.md") expect_proj_file(".github/CODE_OF_CONDUCT.md") expect_true(is_build_ignored("^\\.github$")) }) test_that("use_tidy_github() adds and Rbuildignores files", { local_interactive(FALSE) local_target_repo_spec("OWNER/REPO") create_local_package() use_git() use_tidy_github() expect_proj_file(".github/CONTRIBUTING.md") expect_proj_file(".github/ISSUE_TEMPLATE/issue_template.md") expect_proj_file(".github/SUPPORT.md") expect_proj_file(".github/CODE_OF_CONDUCT.md") expect_true(is_build_ignored("^\\.github$")) }) test_that("styling the package works", { skip_if_no_git_user() skip_if_not_installed("styler") pkg <- create_local_package() use_r("bad_style") path_to_bad_style <- proj_path("R/bad_style.R") write_utf8(path_to_bad_style, "a++2\n") capture_output(use_tidy_style()) expect_identical(read_utf8(path_to_bad_style), "a + +2") file_delete(path_to_bad_style) }) test_that("styling of non-packages works", { skip_if_no_git_user() skip_if_not_installed("styler") proj <- create_local_project() path_to_bad_style <- proj_path("R/bad_style.R") use_r("bad_style") write_utf8(path_to_bad_style, "a++22\n") capture_output(use_tidy_style()) expect_identical(read_utf8(path_to_bad_style), "a + +22") file_delete(path_to_bad_style) }) usethis/tests/testthat/test-utils-ui.R0000644000176200001440000001451214651000165017614 0ustar liggesuserscli::test_that_cli("ui_bullets() look as expected", { # suppress test silencing 
withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot( ui_bullets(c( # relate to legacy functions "_" = "todo", # ui_todo() "v" = "done", # ui_done() "x" = "oops", # ui_oops() "i" = "info", # ui_info() "noindent", # ui_line() # other cli bullets that have no special connection to usethis history " " = "indent", "*" = "bullet", ">" = "arrow", "!" = "warning" )) ) }) test_that("ui_bullets() respect usethis.quiet = TRUE", { withr::local_options(list(usethis.quiet = TRUE)) expect_no_message( ui_bullets(c( # relate to legacy functions "_" = "todo", # ui_todo() "v" = "done", # ui_done() "x" = "oops", # ui_oops() "i" = "info", # ui_info() "noindent", # ui_line() # other cli bullets that have no special connection to usethis history " " = "indent", "*" = "bullet", ">" = "arrow", "!" = "warning" )) ) }) cli::test_that_cli("ui_bullets() does glue interpolation and inline markup", { # suppress test silencing withr::local_options(list(usethis.quiet = FALSE)) x <- "world" expect_snapshot( ui_bullets(c( "i" = "Hello, {x}!", "v" = "Updated the {.field BugReports} field", "x" = "Scary {.code code} or {.fun function}" )) ) }) test_that("trailing slash behaviour of ui_path_impl()", { # target doesn't exist so no empirical evidence that it's a directory expect_match(ui_path_impl("abc"), "abc$") # path suggests it's a directory expect_match(ui_path_impl("abc/"), "abc/$") expect_match(ui_path_impl("abc//"), "abc/$") # path is known to be a directory tmpdir <- withr::local_tempdir(pattern = "ui_path_impl") expect_match(ui_path_impl(tmpdir), "/$") expect_match(ui_path_impl(paste0(tmpdir, "/")), "[^/]/$") expect_match(ui_path_impl(paste0(tmpdir, "//")), "[^/]/$") }) test_that("ui_abort() works", { expect_usethis_error(ui_abort("spatula"), "spatula") # usethis.quiet should have no effect on this withr::local_options(list(usethis.quiet = TRUE)) expect_usethis_error(ui_abort("whisk"), "whisk") }) test_that("ui_abort() defaults to 'x' for first bullet", { expect_snapshot(error = TRUE, ui_abort("no explicit bullet")) }) test_that("ui_abort() can take explicit first bullet", { expect_snapshot(error = TRUE, ui_abort(c("v" = "success bullet"))) }) test_that("ui_abort() defaults to 'i' for non-first bullet", { expect_snapshot( error = TRUE, ui_abort(c( "oops", " " = "space bullet", "info bullet", "v" = "success bullet" )) ) }) cli::test_that_cli("ui_code_snippet() with scalar input", { withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot( ui_code_snippet(" options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE )") ) }, configs = c("plain", "ansi")) cli::test_that_cli("ui_code_snippet() with vector input", { withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot( ui_code_snippet(c( "options(", " warnPartialMatchArgs = TRUE,", " warnPartialMatchDollar = TRUE,", " warnPartialMatchAttr = TRUE", ")" )) ) }, configs = c("plain", "ansi")) cli::test_that_cli("ui_code_snippet() when language is not R", { withr::local_options(list(usethis.quiet = FALSE)) h <- "blah.h" expect_snapshot( ui_code_snippet("#include <{h}>", language = "") ) }, configs = c("plain", "ansi")) cli::test_that_cli("ui_code_snippet() can interpolate", { withr::local_options(list(usethis.quiet = FALSE)) true_val <- "TRUE" false_val <- "'FALSE'" expect_snapshot( ui_code_snippet("if (1) {true_val} else {false_val}") ) }, configs = c("plain", "ansi")) cli::test_that_cli("ui_code_snippet() can NOT interpolate", { withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot({ 
ui_code_snippet( "foo <- function(x){x}", interpolate = FALSE ) ui_code_snippet( "foo <- function(x){{x}}", interpolate = TRUE ) }) }, configs = c("plain", "ansi")) test_that("bulletize() works", { withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot(ui_bullets(bulletize(letters))) expect_snapshot(ui_bullets(bulletize(letters, bullet = "x"))) expect_snapshot(ui_bullets(bulletize(letters, n_show = 2))) expect_snapshot(ui_bullets(bulletize(letters[1:6]))) expect_snapshot(ui_bullets(bulletize(letters[1:7]))) expect_snapshot(ui_bullets(bulletize(letters[1:8]))) expect_snapshot(ui_bullets(bulletize(letters[1:6], n_fudge = 0))) expect_snapshot(ui_bullets(bulletize(letters[1:8], n_fudge = 3))) }) test_that("usethis_map_cli() works", { x <- c("aaa", "bbb", "ccc") expect_equal( usethis_map_cli(x, template = "{.file <>}"), c("{.file aaa}", "{.file bbb}", "{.file ccc}") ) }) cli::test_that_cli("ui_special() works", { expect_snapshot(cli::cli_text(ui_special())) expect_snapshot(cli::cli_text(ui_special("whatever"))) }, configs = c("plain", "ansi")) cli::test_that_cli("kv_line() looks as expected in basic use", { withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot({ kv_line("CHARACTER", "VALUE") kv_line("NUMBER", 1) kv_line("LOGICAL", TRUE) }) }, configs = c("plain", "fancy")) cli::test_that_cli("kv_line() can interpolate and style inline in key", { withr::local_options(list(usethis.quiet = FALSE)) field <- "SOME_FIELD" expect_snapshot( kv_line("Let's reveal {.field {field}}", "whatever") ) }, configs = c("plain", "fancy")) cli::test_that_cli("kv_line() can treat value in different ways", { withr::local_options(list(usethis.quiet = FALSE)) value <- "some value" adjective <- "great" url <- "https://usethis.r-lib.org/" expect_snapshot({ # evaluation in .envir kv_line("Key", value) # NULL is special kv_line("Something we don't have", NULL) # explicit special kv_line("Key", ui_special("discovered")) # value taken at face value kv_line("Key", "something {.emph important}") # I() indicates value has markup kv_line("Key", I("something {.emph important}")) kv_line("Key", I("something {.emph {adjective}}")) kv_line("Interesting file", I("{.url {url}}")) }) }, configs = c("plain", "fancy")) usethis/tests/testthat/test-git.R0000644000176200001440000000461414651000165016626 0ustar liggesuserstest_that("uses_git() works", { skip_if_no_git_user() create_local_package() expect_false(uses_git()) expect_error(check_uses_git()) git_init() expect_true(uses_git()) expect_no_error(check_uses_git()) }) test_that('use_git_config(scope = "project") errors if project not using git', { create_local_package() expect_usethis_error( use_git_config(scope = "project", user.name = "USER.NAME"), "Cannot detect that project is already a Git repository" ) }) test_that("use_git_config() can set local config", { skip_if_no_git_user() create_local_package() use_git() use_git_config( scope = "project", user.name = "Jane", user.email = "jane@example.org", init.defaultBranch = "main" ) r <- git_repo() expect_identical(git_cfg_get("user.name", "local"), "Jane") expect_identical(git_cfg_get("user.email", "local"), "jane@example.org") expect_identical(git_cfg_get("init.defaultBranch", "local"), "main") expect_identical(git_cfg_get("init.defaultbranch", "local"), "main") }) test_that("use_git_config() can set a non-existing config field", { skip_if_no_git_user() create_local_package() use_git() expect_null(git_cfg_get("aaa.bbb")) use_git_config(scope = "project", aaa.bbb = "ccc") expect_identical(git_cfg_get("aaa.bbb"), 
"ccc") }) test_that("use_git_config() facilitates round trips", { skip_if_no_git_user() create_local_package() use_git() orig <- use_git_config(scope = "project", aaa.bbb = "ccc") expect_null(orig$aaa.bbb) expect_identical(git_cfg_get("aaa.bbb"), "ccc") new <- use_git_config(scope = "project", aaa.bbb = NULL) expect_identical(new$aaa.bbb, "ccc") expect_null(git_cfg_get("aaa.bbb")) }) test_that("use_git_hook errors if project not using git", { create_local_package() expect_usethis_error( use_git_hook( "pre-commit", render_template("readme-rmd-pre-commit.sh") ), "Cannot detect that project is already a Git repository" ) }) test_that("git remote handlers work", { skip_if_no_git_user() create_local_package() use_git() expect_null(git_remotes()) use_git_remote(name = "foo", url = "foo_url") expect_identical(git_remotes(), list(foo = "foo_url")) use_git_remote(name = "foo", url = "new_url", overwrite = TRUE) expect_identical(git_remotes(), list(foo = "new_url")) use_git_remote(name = "foo", url = NULL, overwrite = TRUE) expect_null(git_remotes()) }) usethis/tests/testthat/test-proj.R0000644000176200001440000001535314717524762017040 0ustar liggesuserstest_that("proj_set() errors on non-existent path", { expect_usethis_error( proj_set("abcedefgihklmnopqrstuv"), "does not exist" ) }) test_that("proj_set() errors if no criteria are fulfilled", { tmpdir <- withr::local_tempdir(pattern = "i-am-not-a-project") expect_usethis_error( proj_set(tmpdir), "does not appear to be inside a project or package" ) }) test_that("proj_set() can be forced, even if no criteria are fulfilled", { tmpdir <- withr::local_tempdir(pattern = "i-am-not-a-project") expect_no_error(old <- proj_set(tmpdir, force = TRUE)) withr::defer(proj_set(old)) expect_identical(proj_get(), proj_path_prep(tmpdir)) }) test_that("is_package() detects package-hood", { create_local_package() expect_true(is_package()) create_local_project() expect_false(is_package()) }) test_that("check_is_package() errors for non-package", { create_local_project() expect_usethis_error(check_is_package(), "not an R package") }) test_that("check_is_package() can reveal who's asking", { create_local_project() expect_snapshot( error = TRUE, check_is_package("foo()"), transform = scrub_testproj ) }) test_that("proj_path() appends to the project path", { create_local_project() expect_equal( proj_path("a", "b", "c"), path(proj_get(), "a/b/c") ) expect_identical(proj_path("a", "b", "c"), proj_path("a/b/c")) }) test_that("proj_path() errors with absolute paths", { create_local_project() expect_snapshot(proj_path(c("/a", "b", "/c")), error = TRUE) expect_snapshot(proj_path("/a", "b", "/c"), error = TRUE) expect_snapshot(proj_path("/a", c("b", "/c")), error = TRUE) }) test_that("proj_path() with no inputs returns result of length 1, not 0", { create_local_project() expect_equal(proj_path(), proj_get()) }) test_that("proj_rel_path() returns path part below the project", { create_local_project() expect_equal(proj_rel_path(proj_path("a/b/c")), "a/b/c") }) test_that("proj_rel_path() returns path 'as is' if not in project", { create_local_project() expect_identical(proj_rel_path(path_temp()), path_temp()) }) test_that("proj_set() enforces proj path preparation policy", { # specifically: check that proj_get() returns realized path t <- withr::local_tempdir("proj-set-path-prep") # a/b/d and a/b2/d identify same directory a <- path_real(dir_create(path(t, "a"))) b <- dir_create(path(a, "b")) b2 <- link_create(b, path(a, "b2")) d <- dir_create(path(b, "d")) # input path includes 
symbolic link path_with_symlinks <- path(b2, "d") expect_equal(path_rel(path_with_symlinks, a), path("b2/d")) # force = TRUE local_project(path_with_symlinks, force = TRUE) expect_equal(path_rel(proj_get(), a), path("b/d")) # force = FALSE file_create(path(b, "d", ".here")) proj_set(path_with_symlinks, force = FALSE) expect_equal(path_rel(proj_get(), a), path("b/d")) }) test_that("proj_path_prep() passes NULL through", { expect_null(proj_path_prep(NULL)) }) test_that("is_in_proj() detects whether files are (or would be) in project", { create_local_package() ## file does not exist but would be in project if created expect_true(is_in_proj(proj_path("fiction"))) ## file exists in project expect_true(is_in_proj(proj_path("DESCRIPTION"))) ## file does not exist and would not be in project if created expect_false(is_in_proj(file_temp())) ## file exists and is not in project expect_false(is_in_proj(path_temp())) }) test_that("is_in_proj() does not activate a project", { pkg <- create_local_package() path <- proj_path("DESCRIPTION") expect_true(is_in_proj(path)) local_project(NULL) expect_false(is_in_proj(path)) expect_false(proj_active()) }) test_that("proj_sitrep() reports current working/project state", { pkg <- create_local_package() x <- proj_sitrep() expect_s3_class(x, "sitrep") expect_false(is.null(x[["working_directory"]])) expect_identical( fs::path_file(pkg), fs::path_file(x[["active_usethis_proj"]]) ) }) test_that("with_project() runs code in temp proj, restores (lack of) proj", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) temp_proj <- create_project( file_temp(pattern = "TEMPPROJ"), rstudio = FALSE, open = FALSE ) proj_set_(NULL) expect_null(proj_get_()) res <- with_project(path = temp_proj, proj_get_()) expect_identical(res, temp_proj) expect_null(proj_get_()) }) test_that("with_project() runs code in temp proj, restores original proj", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) host <- create_project( file_temp(pattern = "host"), rstudio = FALSE, open = FALSE ) guest <- create_project( file_temp(pattern = "guest"), rstudio = FALSE, open = FALSE ) proj_set(host) expect_identical(proj_get_(), host) res <- with_project(path = guest, proj_get_()) expect_identical(res, guest) expect_identical(proj_get(), host) }) test_that("with_project() works when temp proj == original proj", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) host <- create_project( file_temp(pattern = "host"), rstudio = FALSE, open = FALSE ) proj_set(host) expect_identical(proj_get_(), host) res <- with_project(path = host, proj_get_()) expect_identical(res, host) expect_identical(proj_get(), host) }) test_that("local_project() activates proj til scope ends", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) new_proj <- file_temp(pattern = "localprojtest") create_project(new_proj, rstudio = FALSE, open = FALSE) proj_set_(NULL) foo <- function() { local_project(new_proj) proj_sitrep() } res <- foo() expect_identical( res[["active_usethis_proj"]], as.character(proj_path_prep(new_proj)) ) expect_null(proj_get_()) }) # https://github.com/r-lib/usethis/issues/954 test_that("proj_activate() works with relative path when RStudio is not detected", { sandbox <- path_real(dir_create(file_temp("sandbox"))) withr::defer(dir_delete(sandbox)) orig_proj <- proj_get_() withr::defer(proj_set(orig_proj, force = TRUE)) withr::local_dir(sandbox) local_rstudio_available(FALSE) rel_path_proj <- path_file(file_temp(pattern = "mno")) out_path <- 
create_project(rel_path_proj, rstudio = FALSE, open = FALSE) expect_no_error( result <- proj_activate(rel_path_proj) ) expect_true(result) expect_equal(path_wd(), out_path) expect_equal(proj_get(), out_path) }) # https://github.com/r-lib/usethis/issues/1498 test_that("local_project()'s `quiet` argument works", { temp_proj <- create_project( file_temp(pattern = "TEMPPROJ"), rstudio = FALSE, open = FALSE ) withr::defer(dir_delete(temp_proj)) local_project(path = temp_proj, quiet = TRUE, force = TRUE, setwd = FALSE) expect_true(getOption("usethis.quiet")) }) usethis/tests/testthat/test-edit.R0000644000176200001440000001171614651000165016771 0ustar liggesusersexpect_r_file <- function(...) { expect_true(file_exists(path_home_r(...))) } expect_fs_file <- function(...) { expect_true(file_exists(path_home(...))) } test_that("edit_file() creates new directory and another and a file within", { tmp <- file_temp() expect_false(dir_exists(tmp)) capture.output(new_file <- edit_file(path(tmp, "new_dir", "new_file"))) expect_true(dir_exists(tmp)) expect_true(dir_exists(path(tmp, "new_dir"))) expect_true(file_exists(path(tmp, "new_dir", "new_file"))) }) test_that("edit_file() creates new file in existing directory", { tmp <- file_temp() dir_create(tmp) capture.output(new_file <- edit_file(path(tmp, "new_file"))) expect_true(file_exists(path(tmp, "new_file"))) }) test_that("edit_file() copes with path to existing file", { tmp <- file_temp() dir_create(tmp) existing <- file_create(path(tmp, "a_file")) capture.output(res <- edit_file(path(tmp, "a_file"))) expect_identical(existing, res) }) test_that("edit_template() can create a new template", { create_local_package() edit_template("new_template") expect_proj_file("inst/templates/new_template") }) ## testing edit_XXX("user") only on travis and appveyor, because I don't want to ## risk creating user-level files de novo for an actual user, which would ## obligate me to some nerve-wracking clean up test_that("edit_r_XXX() and edit_git_XXX() have default scope", { skip_if_no_git_user() ## run these manually if you already have these files or are happy to ## have them or delete them skip_if_not_ci() ## on Windows, under R CMD check, some env vars are set to sentinel values ## https://github.com/wch/r-source/blob/78da6e06aa0017564ec057b768f98c5c79e4d958/src/library/tools/R/check.R#L257 ## we need to explicitly ensure R_ENVIRON_USER="" here withr::local_envvar(list(R_ENVIRON_USER = "")) expect_no_error(edit_r_profile()) expect_no_error(edit_r_buildignore()) expect_no_error(edit_r_environ()) expect_no_error(edit_r_makevars()) expect_no_error(edit_git_config()) expect_no_error(edit_git_ignore()) }) test_that("edit_r_XXX('user') ensures the file exists", { ## run these manually if you already have these files or are happy to ## have them or delete them skip_if_not_ci() ## on Windows, under R CMD check, some env vars are set to sentinel values ## https://github.com/wch/r-source/blob/78da6e06aa0017564ec057b768f98c5c79e4d958/src/library/tools/R/check.R#L257 ## we need to explicitly ensure R_ENVIRON_USER="" here withr::local_envvar(list(R_ENVIRON_USER = "")) edit_r_environ("user") expect_r_file(".Renviron") edit_r_profile("user") expect_r_file(".Rprofile") edit_r_makevars("user") expect_r_file(".R", "Makevars") }) test_that("edit_r_buildignore() only works with packages", { create_local_project() expect_usethis_error(edit_r_buildignore(), "not an R package") use_description() edit_r_buildignore() expect_proj_file(".Rbuildignore") }) test_that("can edit snippets", { path 
<- withr::local_tempdir() withr::local_envvar(c("XDG_CONFIG_HOME" = path)) path <- edit_rstudio_snippets(type = "R") expect_true(file_exists(path)) expect_error( edit_rstudio_snippets("not-existing-type"), regexp = "should be one of" ) }) test_that("edit_r_profile() respects R_PROFILE_USER", { path1 <- user_path_prep(file_temp()) withr::local_envvar(list(R_PROFILE_USER = path1)) path2 <- edit_r_profile("user") expect_equal(path1, path2) }) test_that("edit_git_XXX('user') ensures the file exists", { skip_if_no_git_user() ## run these manually if you already have these files or are happy to ## have them or delete them skip_if_not_ci() edit_git_config("user") expect_fs_file(".gitconfig") edit_git_ignore("user") expect_fs_file(".gitignore") expect_match( git_cfg_get("core.excludesfile", where = "global"), "gitignore" ) }) test_that("edit_r_profile() ensures .Rprofile exists in project", { create_local_package() edit_r_profile("project") expect_proj_file(".Rprofile") create_local_project() edit_r_profile("project") expect_proj_file(".Rprofile") }) test_that("edit_r_environ() ensures .Renviron exists in project", { create_local_package() edit_r_environ("project") expect_proj_file(".Renviron") create_local_project() edit_r_environ("project") expect_proj_file(".Renviron") }) test_that("edit_r_makevars() ensures .R/Makevars exists in package", { create_local_package() edit_r_makevars("project") expect_proj_file(".R", "Makevars") }) test_that("edit_git_config() ensures git ignore file exists in project", { create_local_package() edit_git_config("project") expect_proj_file(".git", "config") create_local_project() edit_git_config("project") expect_proj_file(".git", "config") }) test_that("edit_git_ignore() ensures .gitignore exists in project", { create_local_package() edit_git_ignore("project") expect_proj_file(".gitignore") create_local_project() edit_git_ignore("project") expect_proj_file(".gitignore") }) usethis/tests/testthat/test-testthat.R0000644000176200001440000000042014651000165017672 0ustar liggesuserstest_that("use_testhat() sets up infrastructure", { pkg <- create_local_package() use_testthat() expect_match(proj_desc()$get("Suggests"), "testthat") expect_proj_dir("tests", "testthat") expect_proj_file("tests", "testthat.R") expect_true(uses_testthat()) }) usethis/tests/testthat/test-lifecycle.R0000644000176200001440000000055714651000165020004 0ustar liggesuserstest_that("use_lifecycle() imports badges", { create_local_package() use_package_doc() withr::local_options(usethis.quiet = FALSE, cli.width = Inf) expect_snapshot( use_lifecycle(), transform = scrub_testpkg ) expect_proj_file("man", "figures", "lifecycle-stable.svg") expect_equal(roxygen_ns_show(), "#' @importFrom lifecycle deprecated") }) usethis/tests/testthat/test-badge.R0000644000176200001440000000237414717524721017122 0ustar liggesuserstest_that("use_[cran|bioc]_badge() don't error", { create_local_package() expect_no_error(use_cran_badge()) expect_no_error(use_bioc_badge()) }) test_that("use_lifecycle_badge() handles bad and good input", { create_local_package() expect_snapshot(error = TRUE, { use_lifecycle_badge("eperimental") }) expect_no_error(use_lifecycle_badge("stable")) }) test_that("use_binder_badge() needs a github repository", { skip_if_no_git_user() create_local_project() use_git() expect_error(use_binder_badge(), class = "usethis_error_bad_github_remote_config") }) test_that("use_posit_cloud_badge() handles bad and good input", { create_local_project() expect_snapshot(use_posit_cloud_badge(), error = TRUE) 
expect_snapshot(use_posit_cloud_badge(123), error = TRUE) expect_snapshot(use_posit_cloud_badge("http://posit.cloud/123"), error = TRUE) expect_no_error(use_posit_cloud_badge("https://posit.cloud/content/123")) expect_no_error(use_posit_cloud_badge("https://posit.cloud/spaces/123/content/123")) }) test_that("use_badge() does nothing if badge seems to pre-exist", { create_local_package() href <- "https://cran.r-project.org/package=foo" writeLines(href, proj_path("README.md")) expect_false(use_badge("foo", href, "SRC")) }) usethis/tests/testthat/test-ignore.R0000644000176200001440000000026014651000165017317 0ustar liggesuserstest_that(". escaped around surround by anchors", { expect_equal(escape_path("."), "^\\.$") }) test_that("strip trailing /", { expect_equal(escape_path("./"), "^\\.$") }) usethis/tests/testthat/setup.R0000644000176200001440000000011214651000165016213 0ustar liggesuserswithr::local_options(usethis.quiet = TRUE, .local_envir = teardown_env()) usethis/tests/testthat.R0000644000176200001440000000007214651000165015060 0ustar liggesuserslibrary(testthat) library(usethis) test_check("usethis") usethis/tests/spelling.R0000644000176200001440000000021114651000165015030 0ustar liggesusersif (requireNamespace("spelling", quietly = TRUE)) { spelling::spell_check_test(vignettes = TRUE, error = FALSE, skip_on_cran = TRUE) } usethis/MD50000644000176200001440000005143514721320322012252 0ustar liggesusers8b006109db27b10315ad7239e2a2f422 *DESCRIPTION 764bb5aec2dc3bef4e44f1387ec895b3 *LICENSE a9e2f24550830fcf1c0cfaa02efe2964 *NAMESPACE fcdfef0f35f9c300aaf0f8dd8403e585 *NEWS.md eaff54e68339fd1440897277541f5f5b *R/addin.R 2da49b26bbf3da10d4e035145a921336 *R/author.R 5ec6b769b29ddcfa7448654f0e2fc4a9 *R/badge.R 80a4c61b572367c4b132982eb69e61f8 *R/block.R c82f36f0599aee2c80f30da9c1eccccc *R/browse.R 6899bcdcc853161b88a7dc126c5f5e1c *R/ci.R 20d25ce20e66f1c0611501d827cc3f71 *R/citation.R 0dd9347b52a03fc84647397c1ccf47f6 *R/code-of-conduct.R 9ba0a74fa1edd70e50521970ec24e310 *R/course.R ccc7801472e20b93277beaedf0166285 *R/coverage.R a254be9ba651355edb570fbd4e842eb2 *R/cpp11.R 269c9cd1fe56705bd5c3430a46f7366a *R/cran.R dd70f1b7c9f99a8e21b2c26d15de5ded *R/create.R 20a55fa4cb553df693e89018cb685684 *R/data-table.R 11d72f0eeebe25f45eb899ba6db2c688 *R/data.R e1139c1ff6cb5fc3eb59e1adf154caa3 *R/description.R a4903a49ede403db129362c34a75615e *R/directory.R 00122649f39beb29ee0ac4e4ee45ef4b *R/documentation.R 22a2b102e9230acdbb80524e1327cc79 *R/edit.R 89eee0d7de2a8484667cff3ba0376fdb *R/git-default-branch.R 9a28325d9c256f011563d78b451b43b8 *R/git.R 09c799a2757b23d33d00168d5fe3e5d8 *R/github-actions.R 1afddf58c47190dacba3c73f78919be8 *R/github-labels.R 7dae36c8241d8447b5ea1d60c905c5bf *R/github-pages.R b848bdbd0369005f0c029f7e33db0996 *R/github.R c1943c2cce305d43ded2f5d083c9fa32 *R/github_token.R 584ce8dc3a2bc3702feebdf316c8f14a *R/helpers.R 13a7c322ae38929340718e793b2769d6 *R/ignore.R 799bdb3344a8c0c98ff29fa5f7055f01 *R/import-standalone-obj-type.R f0073db6a780d16de572e063bbf1e6b5 *R/import-standalone-types-check.R d685dcac9840f57152c31428dcb91a68 *R/issue.R 97ba2f678e028bcd3ad06c9b27ab0082 *R/jenkins.R 0163973f46b9a9249d2428e83fd69aad *R/latest-dependencies.R 2b6d669ed8b994e6b036e1f76bbfb174 *R/license.R 13ca53c72206b29d7397111bdcd78a76 *R/lifecycle.R 5b9f7feee8c08e0fb1b48bedaa04060d *R/line-ending.R 584f8af73e9e709be960066e7d0c6343 *R/logo.R 9c6ec3c1aa124dfb95220bd201d1a46c *R/make.R ad770149b178cb814f8be00378347645 *R/namespace.R 7f583d73a1a2f40908886891b207db6b *R/news.R 9d72140c8ae9c7b2d994d43b2bd85078 
*R/package.R d0699e7c6720831b60fb2fc2d79db8c9 *R/pipe.R 3b8decfdfb014776c5f53e4902ab05ac *R/pkgdown.R 54bcaa3b61c11b87d3ed71427d6c1fef *R/positron.R 2b396a698df1b3867132fe94a56db6fe *R/pr.R 30c2cfc2bc79eab19f1e748299da9549 *R/proj-desc.R 5069ab4205c605696934ae25a60e0e3f *R/proj.R d2e95165e3f885ca3586d277665820ef *R/r.R 38f5df4cd70888f06464ed8867502af5 *R/rcpp.R c0a756b4448cce3b0b5a2143b2ac33a8 *R/readme.R f5f6f212960248d2e3c22b6bcb647754 *R/release.R a96c1f899ff930e4c194c1c66881c778 *R/rename-files.R b2d49d1e0391ab14a2a98eba4cabc0fc *R/revdep.R 7de9a6985934982fdc6468f007c4cc79 *R/rmarkdown.R a0c1ad35eb127355a6867e2c5548900c *R/roxygen.R a5e9e22d1c1eeee43d19ddd3790afe23 *R/rprofile.R 2148e2716bf2c87267c8aee11f9d2b34 *R/rstudio.R f4c5fd5b16c91dff49a55589bdcb1591 *R/sitrep.R b6c4cdfd92eefc16a039fbff83986337 *R/spelling.R 6f20e8ef8a3151a611042baa31536ec8 *R/template.R 574d17b72889c5d4d9fbc1b423a524da *R/test.R f82677e3a0df8f2d6d5ad317edea579e *R/tibble.R d7abf64edc61fc182c81f9c01ad9e5fb *R/tidyverse.R 34f5f4f44d8ff7dc9fe7683bebb2e98b *R/tutorial.R 28d3fd1d83f4037d5f20b5f9d31f689f *R/ui-legacy.R f4b35117dd388119e3efc160ec691154 *R/upkeep.R d41d8cd98f00b204e9800998ecf8427e *R/use-compat-file.R 6b5aef4a32f96573ddf4a6c46f8a9552 *R/use_github_file.R 164a26cb4b7b85ad7988dce754d4081a *R/use_import_from.R 85bb4cf04cd3611ea0528ae426b7a0a3 *R/use_standalone.R ae1f2ed9d73bc99da42de8438152c5d6 *R/usethis-deprecated.R 3bfcdb8b8a561ca3e0bb6225ab3f8717 *R/usethis-package.R ec7e3c2a712363d1c59c88a88f1d23ba *R/utils-gh.R d4f4d8a0c69e1ff6f2e810dfd20db614 *R/utils-git.R c0acc410b49d2aad565789baddeddf56 *R/utils-github.R ab2402e681c0f52003a3513213217016 *R/utils-glue.R ed24074a12c2ed278906add668821c8b *R/utils-rematch2.R 341ba56cbb85e36afdfccc58547462c1 *R/utils-roxygen.R 760d1740476bff6fcd0deb3340e62a6c *R/utils-ui.R b68b72a952312e3239ae7f5395e30015 *R/utils.R b96286d5d257d19bc75954effa0c03ed *R/version.R ef3be806ab781866269955301b23db7d *R/vignette.R 727acddf70ede2e2229ba47447a87acd *R/vscode.R 7be217c0290425f40a2d0063e46d13ee *R/write.R cb70bcab7ec9f54dccaa5271ae69c5fe *README.md 9b206d70b99a02d2dcf3d11d43f780fc *inst/WORDLIST dbad05c8faf17514ed2f29c3fec13fed *inst/templates/CODE_OF_CONDUCT.md 3012f896ad62324283af46ec288ca3b5 *inst/templates/Jenkinsfile 1d9471302aec5402576ad12134bcecdc *inst/templates/Makefile 54e604cdc947c617672baa39efba9b3c *inst/templates/NEWS.md 2ef78790c51e3159c7e067a9c4565291 *inst/templates/addins.dcf 2fc4bbb9a984818ed8018545119b31a3 *inst/templates/article.Rmd 1a00cb3e02c869a49ad375e158551ca5 *inst/templates/article.qmd 86f115fb743f9009aeecdac11e0e0b31 *inst/templates/circleci-config.yml 8ab3a68040c7105bf03b2c4051a548fc *inst/templates/citation-template.R 3eec90e112c3f1af036433fb8e7b6365 *inst/templates/code-cpp11.cpp ea9234ef2e8fae457c79094c018603f7 *inst/templates/code.c 2950d8e59ca018b7632ef0a21c00fc23 *inst/templates/code.cpp 3df907b50c4f00bc26ee6d988e0c849f *inst/templates/codecov.yml 37497f21f3011dbabefd9659eeb087c0 *inst/templates/cran-comments.md 44c4b1df8df8ac69891ec238311f299c *inst/templates/gitlab-ci.yml 17b944f7f4423005e7402c2b2a7eead5 *inst/templates/junit-testthat.R fb01625ef01973490bcf4efb9ade23ae *inst/templates/license-AGPL-3.md 3d82780e8917b360cbee7b9ec3e40734 *inst/templates/license-GPL-2.md 29a9012941a6bcb26bf0fb4382c5dd75 *inst/templates/license-GPL-3.md 8f5107d98757711ecc1b07ac33877564 *inst/templates/license-LGPL-2.1.md c160dd417c123daff7a62852761d8706 *inst/templates/license-LGPL-3.md f4eda51018051de136d3b3742e9a7a40 *inst/templates/license-apache-2.md 
3bedcaeda57cf8e31f791dd9e127eb0f *inst/templates/license-cc0.md 3e19557ffd5cbc68a1fd63ac8d687a95 *inst/templates/license-ccby-4.md 8c2921a5e3b7594dee4426ce2e641120 *inst/templates/license-mit.md c6c9f877dd63276ed8f5fb74c6c8356b *inst/templates/license-proprietary.txt 391f696f961e28914508628a7af31b74 *inst/templates/lifecycle-deprecated.svg 691b1eb2aec9e1bec96b79d11ba5e631 *inst/templates/lifecycle-experimental.svg ed42e3fbd7cc30bc6ca8fa9b658e24a8 *inst/templates/lifecycle-stable.svg bf2f1ad432ecccee3400afe533404113 *inst/templates/lifecycle-superseded.svg c8e06db9681d18ba05d63fa535647e9c *inst/templates/package-README 263378e6f4f25a1188a90af96673d314 *inst/templates/packagename-data-prep.R df6cc46bc7fae1a55b713f3d5065b35a *inst/templates/packagename-package.R b56ff2b193ecba52f9ec7f3b10cca008 *inst/templates/pipe.R cad0a0587138e044ecc67551ef2db19c *inst/templates/project-README 8c4b3f14fb3d6ffc74a28f361771b9c9 *inst/templates/readme-rmd-pre-commit.sh 7fa100ac70c48d6f71765ac00943b3cd *inst/templates/rmarkdown-template.Rmd 10d8de4f671a94595a1aacce4ac35620 *inst/templates/rmarkdown-template.yml 6fecee1ae482da9768c34d1e5e3dbf47 *inst/templates/template.Rproj 225e3812b897786befc574f805c96dc2 *inst/templates/test-example-2.1.R 58724a20c50eb8936bd9cb131d888c07 *inst/templates/testthat.R da2cae39083c588f2b6bc9a85e21fb87 *inst/templates/tidy-contributing.md 2d71e311a96120f28c4ed83edc487b05 *inst/templates/tidy-issue.md 46565db58495bfb67d23cde8a7529314 *inst/templates/tidy-support.md 0f285483be0392e2b606f7669e3a215c *inst/templates/tutorial-template.Rmd ed229c502d26f6c2d4de018e0f48ea66 *inst/templates/vignette.Rmd 85d120ab5975095fd0f86bad819b9a2d *inst/templates/vignette.qmd 9863deacd4949e7b7c92b680f927f290 *inst/templates/vscode-c_cpp_properties.json 02c1313d414157c6aea7104e1414f4e0 *inst/templates/vscode-debug.R 4257796be9c46045c3d7e617f478a7fb *inst/templates/vscode-launch.json 77639515db0fda8f3b1bffef4cfe3a74 *inst/templates/year-copyright.txt d926732618a54c1d9e08fc63c7956926 *man/badges.Rd 9ec3e52c01fccda3788d2bd0aba153f3 *man/browse-this.Rd 3863fd538da83e7970988c8e9233c832 *man/create_from_github.Rd 542ca433133fe3963aeca212943df2e8 *man/create_package.Rd 5a0a544de2c2f0b53157e26ff7fbac21 *man/edit.Rd 00198b1c402fc0a2a8af112e6a8e26d5 *man/edit_file.Rd 391f696f961e28914508628a7af31b74 *man/figures/lifecycle-deprecated.svg 691b1eb2aec9e1bec96b79d11ba5e631 *man/figures/lifecycle-experimental.svg ed42e3fbd7cc30bc6ca8fa9b658e24a8 *man/figures/lifecycle-stable.svg bf2f1ad432ecccee3400afe533404113 *man/figures/lifecycle-superseded.svg b314e2ce7ae989f3cabce731835719c1 *man/figures/logo.png d110c97a7282b95e81713bff0897674d *man/git-default-branch.Rd 3cbde807c1e09099c831c45146d22cd9 *man/git_branch_default.Rd 1a6b34a9b8c2fd09ba0606ffde10a463 *man/git_protocol.Rd 14f1b95291e4a757cb23f9c5ced67e2a *man/git_sitrep.Rd b2063e60670e36730f809377cbe6f27b *man/git_vaccinate.Rd 5fab5627366d695da95077ae624baca2 *man/github-token.Rd 9ed5bb4692580288ad6a725972fe0125 *man/issue-this.Rd 6003455227b2979dcedf27dddd73dd5d *man/licenses.Rd 6f7b59931da730d9ffae7a04b9b2c275 *man/proj_activate.Rd 27251420bd41c4ce26a4ed82580818f1 *man/proj_sitrep.Rd a58037b8e9c08f2b8da3ac83c113d177 *man/proj_utils.Rd 16277ed4c83aa33f729bddb61f0bb5a9 *man/pull-requests.Rd ace748d4a2737b0a8f7c7f5ee4a63b55 *man/rename_files.Rd a4b7550663efd206a22a42414bb776a4 *man/roxygen/templates/double-auth.R 6dbc7476bc0ad0e83ed5590344ce1586 *man/rprofile-helper.Rd de6cc7114cf7072dea567e978b499b26 *man/tidyverse.Rd 74398045ba613b5693a95d27cf5518ad 
*man/ui-legacy-functions.Rd b86ec9a6918adb482f3cf2e2bab6ec5f *man/ui-questions.Rd 4d061ce36c5867a899f3719b11b52709 *man/ui_silence.Rd e535458be477c38e6dcf25e94ff4a220 *man/use_addin.Rd b2be2b2f5e9895d83eb3367e5d4b4069 *man/use_author.Rd 41bc1648c59e65ce8fd2422e5c98fc1b *man/use_blank_slate.Rd 72e280c5a84e0a89d830217b599a9dc6 *man/use_build_ignore.Rd 5e970fc688b95c93593b334347b003b5 *man/use_citation.Rd 16ad570a3d2d6054239e4c2c93e5fd27 *man/use_code_of_conduct.Rd f60845b8ac1f27312ba17296ecb1681a *man/use_course_details.Rd 76d15762ea8ee81790b5b3c5c9324c89 *man/use_coverage.Rd da497f279f309b2af2575940fe8f607f *man/use_cpp11.Rd 7e314616b7b579d5e5fe753928899614 *man/use_cran_comments.Rd d751aa0ff35d2303bc9d2bc9357f361e *man/use_data.Rd cf15a2e99d0d2ecbf4405dac832ab72e *man/use_data_table.Rd f411f9d32d427486ad569d0147c4c158 *man/use_description.Rd bb84842ca4e3ab9906d17bfe2fe31362 *man/use_directory.Rd e267716593436e5853dde0d2b1de13a9 *man/use_git.Rd 1ca6fe11b56ff1abb311dc06d08f6534 *man/use_git_config.Rd 1e056a5ad2bff0d888bc79b11e287d03 *man/use_git_hook.Rd f83f636a6642dc870325a02c2e8ea6ca *man/use_git_ignore.Rd 95591cbb0f6c45521670522e33962939 *man/use_git_remote.Rd 4bf7a0147ff70e09d642067fc5e5f41b *man/use_github.Rd 14fdee90deee3d79ec9938807550520b *man/use_github_action.Rd e3cc3a6c52a13672e2c68ebaa07a2e47 *man/use_github_actions.Rd 7f741aafbcd602e50f57513ca59587e8 *man/use_github_actions_badge.Rd fdcdcd92ae0486d4090b323e7e157cbc *man/use_github_file.Rd da87aeb01232fb08471e71c545e70846 *man/use_github_labels.Rd b9406e49c4f173028d98c1c44ad63eb9 *man/use_github_links.Rd 74d592c912559772baf5f21edd796a93 *man/use_github_pages.Rd cd2ac7610d32559a330c90c4b7fc71b4 *man/use_github_release.Rd 44e24e300b3c0d5840ed555122ee92ea *man/use_gitlab_ci.Rd 3538a6b6ca991233a6e82471b09a9452 *man/use_import_from.Rd 9343e7601d112ff9a44fdf49568f9574 *man/use_jenkins.Rd 5fcc1bec2b4a3dca56ae9193193b5029 *man/use_latest_dependencies.Rd 3eb05b1cbeb13864410aa0d5f98db898 *man/use_lifecycle.Rd 1fefdf5cb71771db91e7b297788ac281 *man/use_logo.Rd 5f75bb0ea883860456f6755f24e9347d *man/use_make.Rd 7c0870e9a4e9d4d1fbed78956fdf5282 *man/use_namespace.Rd 38a27e7ac1ba57cfa91ea668c79239a7 *man/use_news_md.Rd a3357be84eabbf60a63d8c83ba7b13f9 *man/use_package.Rd e4f7f087e6a73f1bc3bceab15efbc16f *man/use_package_doc.Rd 66adeaa0144b65d160ed582e1973e43d *man/use_pipe.Rd 9efba958b7af7f8add4d28042d882f0f *man/use_pkgdown.Rd c171b7c298211fed0e9b6142ef1e4172 *man/use_r.Rd f9bdfeb7b6acf1e9867e9c6204b317ba *man/use_rcpp.Rd 544a25254eec61a4961205d6f0e3f99f *man/use_readme_rmd.Rd f55ba6b8d956146d5f8dbb81d4e5a0fc *man/use_release_issue.Rd 0d6a5201df770a0d3828272b8eec13f0 *man/use_revdep.Rd 2cb4c5b437beb2e0706f456723f5c915 *man/use_rmarkdown_template.Rd 29a7361318615c32f695c54e785569a7 *man/use_roxygen_md.Rd 7a5b3de1574e2e982a326442b3df4fdf *man/use_rscloud_badge.Rd 3bbf01ee5cd608593aea405026812cb8 *man/use_rstudio.Rd c5849571b61018b89c992c5719c19f00 *man/use_rstudio_preferences.Rd ad45bbad2f422897aabbe00b959f94c5 *man/use_spell_check.Rd c93aaafb7b3fd29be212b1a41d69ef7a *man/use_standalone.Rd 68f3cfe12ffea85b6885ac8a0f43f159 *man/use_template.Rd 84164842119b94496fe0fce9391d688e *man/use_test_helper.Rd 421ff926fd187eec34a359eb76b25ecd *man/use_testthat.Rd a0387113c37439f07fc73b354334d2eb *man/use_tibble.Rd 60dd5c1b6894e2cce37ddd4a6b1b088e *man/use_tidy_eval.Rd afe946572105ec9cc3ef9f7b1ea32b62 *man/use_tidy_thanks.Rd 4983290f4a1ca1c82765cabe41a810fd *man/use_tutorial.Rd b34f4f8cbd468ace853c53b2828ca88a *man/use_upkeep_issue.Rd 
fc33e630559d9199388bed7149db86a9 *man/use_version.Rd 0e1fe4bb836a0035f95398f6a467a0d6 *man/use_vignette.Rd ab8febf8cb383eda497d5b08a065e9bf *man/usethis-package.Rd b1adbee6bbcb20cbe86a7c3e59899bf6 *man/usethis_options.Rd 45eb78bd0e8e9dd7237d4b22f9c8524b *man/write-this.Rd 692add80e2951398223a8039c8bc2460 *man/zip-utils.Rd d4bbddb01053b52c56f14f47a306e0c1 *tests/spelling.R 3d8bfd8d9e35decfeadde3ffa53fbffd *tests/testthat.R acdf6b440fcf4bed9d3b60b83d6350ad *tests/testthat/_snaps/author.md 19cfac83a52ddc162cd9b09de76d3581 *tests/testthat/_snaps/badge.md 86ed7db4230e53bad6adcf9d34f8918f *tests/testthat/_snaps/course.md b4854848fcb6f114564b0b2a84811e83 *tests/testthat/_snaps/coverage.md 36e6f2ab1a61e6ecfc01527b8714f71a *tests/testthat/_snaps/cpp11.md e3bc97eb4abb1a5ef9430df03f536302 *tests/testthat/_snaps/data-table.md 45d60ab4ac21d013eefdffb31a45dd05 *tests/testthat/_snaps/git-default-branch.md 92d8ca640559ce60d7a6ccf3ebd9aedc *tests/testthat/_snaps/github-actions.md c5056c452627548ff7a61bc85d973a51 *tests/testthat/_snaps/github.md 6d8b252a5aed2f0a2e2450ac91861f1e *tests/testthat/_snaps/helpers.md 34a302adc626c3921ec3e9bb64db3590 *tests/testthat/_snaps/lifecycle.md cf21ad2c8d37f0b03c2ea5459c7984e7 *tests/testthat/_snaps/logo.md 57302a46704ee0934416884b080d3215 *tests/testthat/_snaps/news.md 502b5805d17b22d776941215895b43b2 *tests/testthat/_snaps/package.md 1dad501f727e932b45a3d91d99f63628 *tests/testthat/_snaps/pipe.md 5b2fe80f2bbf252dec30103ce73fe32d *tests/testthat/_snaps/pkgdown.md e45869b2c0d39910bedeb8f28cc8150a *tests/testthat/_snaps/proj-desc.md 6c650f7c9b9374267a878fbeb89a5637 *tests/testthat/_snaps/proj.md 5f3b4338e9534f948656349c58096540 *tests/testthat/_snaps/r.md 48a49bf533b15b2f370b5f4e504ca422 *tests/testthat/_snaps/readme.md 2cbf3aa1f1d1da90b4a6ad00d9e01009 *tests/testthat/_snaps/release.md 56c3795881f54561cf4feca2c21a5622 *tests/testthat/_snaps/rename-files.md 410d84e97bf0f21d81153d3739d698a6 *tests/testthat/_snaps/roxygen.md 8b3d49918a71e03210461e6dd7704b9f *tests/testthat/_snaps/rstudio.md d595c38fedc9d555b10add274cbb4266 *tests/testthat/_snaps/tibble.md fa1fbf43e4b0e7d4ce916b1d8f8e57df *tests/testthat/_snaps/tidyverse.md c0159b27dfafc9e4fd6eaa8000d42fda *tests/testthat/_snaps/tutorial.md 6acb1a1b94997c023dde87c1e688d01e *tests/testthat/_snaps/ui-legacy.md f8ec51de1f10686627e18f50d1d79ac3 *tests/testthat/_snaps/upkeep.md bb157caf1cf14411a414b512d0f9b3ca *tests/testthat/_snaps/use_import_from.md a79989a69270e5ce12159604343e898a *tests/testthat/_snaps/use_standalone.md f51bdadd6e72b1eba505d538bcdad07c *tests/testthat/_snaps/usethis-deprecated.md e06ced4582f12d4afeb87e8074128f76 *tests/testthat/_snaps/utils-github.md 2bb57d1f976fa09355d745ac8620e4fd *tests/testthat/_snaps/utils-ui.md 172e7f9d9a859119dee670c8711ff4e7 *tests/testthat/_snaps/utils.md c381c94226afc98a4afb7e0394e28dff *tests/testthat/_snaps/version.md c12614deaa7134ba7125605e35a0a173 *tests/testthat/_snaps/vignette.md d41a5c51361a021de747121bd4480eec *tests/testthat/_snaps/write.md 3157c8965ab50cd26a6450a97b1c4be1 *tests/testthat/helper-mocks.R 08754cc45e83b36424c7a00dfc1686a0 *tests/testthat/helper.R b71942c454b72e4396689f5cd1330691 *tests/testthat/ref/README.Rmd df36be9e7a6c7b7403470e0641cfc571 *tests/testthat/ref/README.md 4013006b447ecd4f5ff17d6aaba6ad26 *tests/testthat/ref/foo-explicit-parent.zip bf028a82853c1066245b87981607b2a7 *tests/testthat/ref/foo-implicit-parent.zip bf89b424b9fbb8d488d29e701d174437 *tests/testthat/ref/foo-loose-dropbox.zip bec26ea41acec7ac3cfa0750ca1a911a 
*tests/testthat/ref/foo-no-parent.zip 44f3b3cab6cbfc399258c007e0a1359f *tests/testthat/ref/foo/file.txt 9135e2ab2a5c08b08265ccbde1f6f2a3 *tests/testthat/ref/yo-explicit-parent.zip 944b7eabdfefa46aa5b35c019bdd4ae8 *tests/testthat/ref/yo-implicit-parent.zip 67f5e8d94d8c417642d28f4ed96adc06 *tests/testthat/ref/yo-loose-dropbox.zip 0009ccc1c812cc0a4b7e763290c8eab3 *tests/testthat/ref/yo-no-parent.zip 58a1423de7e870c98b9fb948ea4d9f9c *tests/testthat/ref/yo/subdir1/file1.txt a783d3efd49413330a902d71111392f8 *tests/testthat/ref/yo/subdir2/file2.txt ec2e5afe355da6c62385de8900afda93 *tests/testthat/setup.R be3f37f889600d912c941dc32561f7e6 *tests/testthat/test-addin.R f0499c970061bd9ba91059ff33d3c77f *tests/testthat/test-author.R a008a8b89606ea789999f49dac4df1f9 *tests/testthat/test-badge.R b7fb3ef24a5baed105e5a75eb9b95638 *tests/testthat/test-block.R 52ff4420edafd87e59d0e51e7d58a572 *tests/testthat/test-browse.R 29f7bd6e4df2b0633fdcadc107e29601 *tests/testthat/test-ci.R 06ababbf891a702f97ab503e27867f3c *tests/testthat/test-citation.R b279bd713dabebad7aec7a771c1fcb05 *tests/testthat/test-code-of-conduct.R b3a232d7e3e4072fa7bd72aefb7482b1 *tests/testthat/test-course.R ac651839e7a400c023e382eb6d589625 *tests/testthat/test-coverage.R 0d1eb4d3f9a88edd4b29bf8b9b6a6bf0 *tests/testthat/test-cpp11.R 429fff6097b4739fcf08c5aea867bf66 *tests/testthat/test-cran.R 03c3831e24727d801a3d24c739b73d01 *tests/testthat/test-create.R 3472bff77547bbb83fab15834b764b3b *tests/testthat/test-data-table.R 48014a234b61efdfdf66878056da70b1 *tests/testthat/test-data.R b5b90c8ca255df4c5b3798bef2847088 *tests/testthat/test-description.R 008a20cdc134220ce7ceaba1d3c1ffb9 *tests/testthat/test-directory.R 94089899f583791ac60e4ddff0ea0e44 *tests/testthat/test-documentation.R 35534bbf4d536ff1957a2f01143d8e34 *tests/testthat/test-edit.R 8db9e390cd28a09df0362675b7e77035 *tests/testthat/test-git-default-branch.R d68305912cf0ddb820c505695f9e0d46 *tests/testthat/test-git.R 531dbb12dda697b51c1da9772c8d053c *tests/testthat/test-github-actions.R 27c3e2f4a082326a5a775d38361fe942 *tests/testthat/test-github.R 2e21c8b76fc237dc073fbdf0f2df0f94 *tests/testthat/test-github_token.R fb5ac8c110cec0758c4799e32762ccad *tests/testthat/test-helpers.R 78f6758cbac1d0bcb01ffacac2de48ad *tests/testthat/test-ignore.R 2cbfec6ac278e91cf278c4b201144b84 *tests/testthat/test-jenkins.R 8773a7c7e98799999e4989aba4110556 *tests/testthat/test-latest-dependencies.R 4c18e519bd366323f586843c5e5a3d5c *tests/testthat/test-license.R c4ceb1afc07f4d7657c58d9ddac8347d *tests/testthat/test-lifecycle.R 31b740bd1cf5746d21abe31de7f33e2f *tests/testthat/test-line-ending.R faa1596b72fce14869abc977b886e3b0 *tests/testthat/test-logo.R 2dbba0b9cb53d8a1257fcd97e4c407e1 *tests/testthat/test-make.R 86a52d264a0523b7366c14d0bda3350b *tests/testthat/test-news.R 8a6cdf7602b5f3c8790c2d90dcbb498d *tests/testthat/test-package.R d6726884cddf41136da1c451e575eb35 *tests/testthat/test-pipe.R c15e966765b399ee0f9fdc5c32eb985a *tests/testthat/test-pkgdown.R df955c3c51bf1dc38c0f563b4623bece *tests/testthat/test-proj-desc.R 33cea64e51008f8eb4d80789f6278ef7 *tests/testthat/test-proj.R 9d9a055bb5eac01e50fc26b333257f76 *tests/testthat/test-r.R c4f36abe3c8e08b27a9796e4d9cdd779 *tests/testthat/test-rcpp.R 5bd15a12c837e8303cde01022ee99faa *tests/testthat/test-readme.R 7619f00434f0d913c403bb6fa6be40a9 *tests/testthat/test-release.R 5ae115aa29db0b14584c207fa49e8288 *tests/testthat/test-rename-files.R 2ef54e045721de029f32fad429b5a40f *tests/testthat/test-revdep.R 7b2a47558f48da375a38d084477ad781 
*tests/testthat/test-rmarkdown.R b6460d248269106c563879346580cc0b *tests/testthat/test-roxygen.R c53db495d54843c7a3c8e381d7d66a3e *tests/testthat/test-rstudio.R 8c203d2f00b02ced2f485be3145ec48c *tests/testthat/test-template.R 02b6cbe72fab99b5cc68fac1bc61109d *tests/testthat/test-test.R 01ff827ba9e0d58b71552e7bc2cd3c6f *tests/testthat/test-testthat.R 58786b4face0c7fd395310425f2f6919 *tests/testthat/test-tibble.R 108be17e56857044315ab5741c68511c *tests/testthat/test-tidyverse.R 757806994ce9dbcbed5318d1e550b8d6 *tests/testthat/test-tutorial.R 392289dc00f3592d32530311635a0dc8 *tests/testthat/test-ui-legacy.R b5361e21ee5626da0003968d76eb06b3 *tests/testthat/test-upkeep.R bb57f3391161b0658c4aa551b2964c20 *tests/testthat/test-use_github_file.R 5e4c283142183893f3349fdb577aa092 *tests/testthat/test-use_import_from.R 2766fda838df3f1b0ec3c4dc57f5407c *tests/testthat/test-use_standalone.R 3a64e9dd94daf5bb4ac87b8d1308aaff *tests/testthat/test-usethis-deprecated.R 26e15f727f0ff5893fad157095c26775 *tests/testthat/test-utils-git.R fba039b537a97fe78944906afbe2ca1b *tests/testthat/test-utils-github.R 2d3170981c90ef8b0ab07df4bef48d82 *tests/testthat/test-utils-glue.R 8341e79f27849f0dc2c3c4309bb469d0 *tests/testthat/test-utils-ui.R c666960b2d60f0011eeeb850659fe416 *tests/testthat/test-utils.R 38143a3032efaa2608e8f5dff5f87174 *tests/testthat/test-version.R 2e63f2f402c523a1f15555403a43f93d *tests/testthat/test-vignette.R 9e29eb791e241509e66c9e08ff4ddd23 *tests/testthat/test-write.R usethis/R/0000755000176200001440000000000014721145632012144 5ustar liggesusersusethis/R/github-labels.R0000644000176200001440000002370414717524721015024 0ustar liggesusers#' Manage GitHub issue labels #' #' @description #' `use_github_labels()` can create new labels, update colours and descriptions, #' and optionally delete GitHub's default labels (if `delete_default = TRUE`). #' It will never delete labels that have associated issues. #' #' `use_tidy_github_labels()` calls `use_github_labels()` with tidyverse #' conventions powered by `tidy_labels()`, `tidy_labels_rename()`, #' `tidy_label_colours()` and `tidy_label_descriptions()`. #' #' ## tidyverse label usage #' Labels are used as part of the issue-triage process, designed to minimise the #' time spent re-reading issues. The absence of a label indicates that an issue #' is new, and has yet to be triaged. #' #' There are four mutually exclusive labels that indicate the overall "type" of #' issue: #' #' * `bug`: an unexpected problem or unintended behavior. #' * `documentation`: requires changes to the docs. #' * `feature`: feature requests and enhancements. #' * `upkeep`: general package maintenance work that makes future development #' easier. #' #' Then there are five labels that are needed in most repositories: #' #' * `breaking change`: issue/PR requires a breaking change, so it should #' not be included in patch releases. #' * `reprex` indicates that an issue does not have a minimal reproducible #' example, and that a reply has been sent requesting one from the user. #' * `good first issue` indicates a good issue for first-time contributors. #' * `help wanted` indicates that a maintainer wants help on an issue. #' * `wip` indicates that someone is working on it or has promised to. #' #' Finally, most larger repos will accumulate their own labels for specific #' areas of functionality. For example, usethis has labels like "description", #' "paths", "readme", because time has shown these to be common sources of #' problems.
These labels are helpful for grouping issues so that you can #' tackle related problems at the same time. #' #' Repo-specific issues should have a grey background (`#eeeeee`) and an emoji. #' This keeps the issue page visually harmonious while still giving enough #' variation to easily distinguish different types of label. #' #' @param labels A character vector giving labels to add. #' @param rename A named vector with names giving old names and values giving #' new names. #' @param colours,descriptions Named character vectors giving hexadecimal #' colours (like `e02a2a`) and longer descriptions. The names should match #' label names, and anything unmatched will be left unchanged. If you create a #' new label, and don't supply colours, it will be given a random colour. #' @param delete_default If `TRUE`, removes GitHub default labels that do not #' appear in the `labels` vector and that do not have associated issues. #' #' @export #' @examples #' \dontrun{ #' # typical use in, e.g., a new tidyverse project #' use_github_labels(delete_default = TRUE) #' #' # create labels without changing colours/descriptions #' use_github_labels( #' labels = c("foofy", "foofier", "foofiest"), #' colours = NULL, #' descriptions = NULL #' ) #' #' # change descriptions without changing names/colours #' use_github_labels( #' labels = NULL, #' colours = NULL, #' descriptions = c("foofiest" = "the foofiest issue you ever saw") #' ) #' } use_github_labels <- function(labels = character(), rename = character(), colours = character(), descriptions = character(), delete_default = FALSE) { tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork")) check_can_push(tr = tr, "to modify labels") gh <- gh_tr(tr) cur_labels <- gh("GET /repos/{owner}/{repo}/labels") label_attr <- function(x, l, mapper = map_chr) { mapper(l, x, .default = NA) } # Rename existing labels cur_label_names <- label_attr("name", cur_labels) to_rename <- intersect(cur_label_names, names(rename)) if (length(to_rename) > 0) { dat <- data.frame(from = to_rename, to = rename[to_rename]) delta <- glue_data( dat, "{.val <>} {cli::symbol$arrow_right} {.val <>}", .open = "<<", .close = ">>" ) ui_bullets(c( "v" = "Renaming labels:", bulletize(delta) )) # Can't do this at label level, i.e. 
"old_label_name --> new_label_name" # Fails if "new_label_name" already exists # https://github.com/r-lib/usethis/issues/551 # Must first PATCH issues, then sort out labels issues <- map( to_rename, ~ gh("GET /repos/{owner}/{repo}/issues", labels = .x) ) issues <- purrr::flatten(issues) number <- map_int(issues, "number") old_labels <- map(issues, "labels") df <- data.frame( number = rep.int(number, lengths(old_labels)) ) df$labels <- purrr::flatten(old_labels) df$labels <- map_chr(df$labels, "name") # enact relabelling m <- match(df$labels, names(rename)) df$labels[!is.na(m)] <- rename[m[!is.na(m)]] df <- df[!duplicated(df), ] new_labels <- split(df$labels, df$number) purrr::iwalk( new_labels, ~ gh( "PATCH /repos/{owner}/{repo}/issues/{issue_number}", issue_number = .y, labels = I(.x) ) ) # issues have correct labels now; safe to edit labels themselves purrr::walk( to_rename, ~ gh("DELETE /repos/{owner}/{repo}/labels/{name}", name = .x) ) labels <- union(labels, setdiff(rename, cur_label_names)) } else { ui_bullets(c("i" = "No labels need renaming.")) } cur_labels <- gh("GET /repos/{owner}/{repo}/labels") cur_label_names <- label_attr("name", cur_labels) # Add missing labels if (all(labels %in% cur_label_names)) { ui_bullets(c("i" = "No new labels needed.")) } else { to_add <- setdiff(labels, cur_label_names) ui_bullets(c( "v" = "Adding missing labels:", bulletize(usethis_map_cli(to_add)) )) for (label in to_add) { gh( "POST /repos/{owner}/{repo}/labels", name = label, color = purrr::pluck(colours, label, .default = random_colour()), description = purrr::pluck(descriptions, label, .default = "") ) } } cur_labels <- gh("GET /repos/{owner}/{repo}/labels") cur_label_names <- label_attr("name", cur_labels) # Update colours cur_label_colours <- set_names( label_attr("color", cur_labels), cur_label_names ) if (identical(cur_label_colours[names(colours)], colours)) { ui_bullets(c("i" = "Label colours are up-to-date.")) } else { to_update <- intersect(cur_label_names, names(colours)) ui_bullets(c( "v" = "Updating colours:", bulletize(usethis_map_cli(to_update)) )) for (label in to_update) { gh( "PATCH /repos/{owner}/{repo}/labels/{name}", name = label, color = colours[[label]] ) } } # Update descriptions cur_label_descriptions <- set_names( label_attr("description", cur_labels), cur_label_names ) if (identical(cur_label_descriptions[names(descriptions)], descriptions)) { ui_bullets(c("i" = "Label descriptions are up-to-date.")) } else { to_update <- intersect(cur_label_names, names(descriptions)) ui_bullets(c( "v" = "Updating descriptions:", bulletize(usethis_map_cli(to_update)) )) for (label in to_update) { gh( "PATCH /repos/{owner}/{repo}/labels/{name}", name = label, description = descriptions[[label]] ) } } # Delete unused default labels if (delete_default) { default <- map_lgl(cur_labels, "default") to_remove <- setdiff(cur_label_names[default], labels) if (length(to_remove) > 0) { ui_bullets(c( "v" = "Removing default labels:", bulletize(usethis_map_cli(to_remove)) )) for (label in to_remove) { issues <- gh("GET /repos/{owner}/{repo}/issues", labels = label) if (length(issues) > 0) { ui_bullets(c( "_" = "Delete {.val {label}} label manually; it has associated issues." 
)) } else { gh("DELETE /repos/{owner}/{repo}/labels/{name}", name = label) } } } } } #' @export #' @rdname use_github_labels use_tidy_github_labels <- function() { use_github_labels( labels = tidy_labels(), rename = tidy_labels_rename(), colours = tidy_label_colours(), descriptions = tidy_label_descriptions(), delete_default = TRUE ) } #' @rdname use_github_labels #' @export tidy_labels <- function() { names(tidy_label_colours()) } #' @rdname use_github_labels #' @export tidy_labels_rename <- function() { c( # before = after "enhancement" = "feature", "question" = "reprex", "good first issue" = "good first issue :heart:", "help wanted" = "help wanted :heart:", "docs" = "documentation" ) } #' @rdname use_github_labels #' @export tidy_label_colours <- function() { # http://tristen.ca/hcl-picker/#/hlc/5/0.26/E0B3A2/E1B996 c( "breaking change :skull_and_crossbones:" = "E0B3A2", "bug" = "E0B3A2", "documentation" = "CBBAB8", "feature" = "B4C3AE", "upkeep" = "C2ACC0", "good first issue :heart:" = "CBBAB8", "help wanted :heart:" = "C5C295", "reprex" = "C5C295", "tidy-dev-day :nerd_face:" = "CBBAB8" ) } #' @rdname use_github_labels #' @export tidy_label_descriptions <- function() { c( "bug" = "an unexpected problem or unintended behavior", "feature" = "a feature request or enhancement", "upkeep" = "maintenance, infrastructure, and similar", "reprex" = "needs a minimal reproducible example", "wip" = "work in progress", "documentation" = "", "good first issue :heart:" = "good issue for first-time contributors", "help wanted :heart:" = "we'd love your help!", "breaking change :skull_and_crossbones:" = "API change likely to affect existing code", "tidy-dev-day :nerd_face:" = "Tidyverse Developer Day" ) } random_colour <- function() { format(as.hexmode(sample(256 * 256 * 256 - 1, 1)), width = 6) } usethis/R/utils-roxygen.R0000644000176200001440000000071214651000165015111 0ustar liggesusers# functions to help reduce duplication and increase consistency in the docs # repo_spec ---- param_repo_spec <- function(...) { template <- glue(" @param repo_spec \\ Optional GitHub repo specification in this form: `owner/repo`. \\ This can usually be inferred from the GitHub remotes of active \\ project. ") dots <- list2(...) if (length(dots) > 0) { template <- c(template, dots) } glue_collapse(template, sep = " ") } usethis/R/git.R0000644000176200001440000004121114717524721013056 0ustar liggesusers#' Initialise a git repository #' #' `use_git()` initialises a Git repository and adds important files to #' `.gitignore`. If user consents, it also makes an initial commit. #' #' @param message Message to use for first commit. #' @family git helpers #' @export #' @examples #' \dontrun{ #' use_git() #' } use_git <- function(message = "Initial commit") { needs_init <- !uses_git() if (needs_init) { ui_bullets(c("v" = "Initialising Git repo.")) git_init() # hacky but helps prevent a pop-up in Positron, where early attempts to # interact with a newly created repo lead to: # Git: There are no available repositories # https://github.com/r-lib/usethis/pull/2011#issue-2380380721 if (is_positron()) { Sys.sleep(1) } } use_git_ignore(git_ignore_lines) if (git_uncommitted(untracked = TRUE)) { git_ask_commit(message, untracked = TRUE) } if (needs_init && !is_positron()) { restart_rstudio("A restart of RStudio is required to activate the Git pane.") } invisible(TRUE) } #' Add a git hook #' #' Sets up a git hook using the specified script. Creates a hook directory if #' needed, and sets correct permissions on hook. 
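#'
#' A minimal sketch of a call, assuming you want a simple shell hook; the
#' script lines below are purely illustrative and are not a hook template
#' shipped with usethis:
#'
#' ```
#' use_git_hook(
#'   "pre-commit",
#'   c(
#'     "#!/bin/bash",
#'     "echo 'About to commit'"
#'   )
#' )
#' ```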
#' #' @param hook Hook name. One of "pre-commit", "prepare-commit-msg", #' "commit-msg", "post-commit", "applypatch-msg", "pre-applypatch", #' "post-applypatch", "pre-rebase", "post-rewrite", "post-checkout", #' "post-merge", "pre-push", "pre-auto-gc". #' @param script Text of script to run #' @family git helpers #' @export use_git_hook <- function(hook, script) { check_uses_git() hook_path <- proj_path(".git", "hooks", hook) create_directory(path_dir(hook_path)) write_over(hook_path, script) file_chmod(hook_path, "0744") invisible() } #' Tell Git to ignore files #' #' @param ignores Character vector of ignores, specified as file globs. #' @param directory Directory relative to active project to set ignores #' @family git helpers #' @export use_git_ignore <- function(ignores, directory = ".") { write_union(proj_path(directory, ".gitignore"), ignores) rstudio_git_tickle() } #' Configure Git #' #' Sets Git options, for either the user or the project ("global" or "local", in #' Git terminology). Wraps [gert::git_config_set()] and #' [gert::git_config_global_set()]. To inspect Git config, see #' [gert::git_config()]. #' #' @param ... Name-value pairs, processed as #' <[`dynamic-dots`][rlang::dyn-dots]>. #' #' @return Invisibly, the previous values of the modified components, as a named #' list. #' @inheritParams edit #' #' @family git helpers #' @export #' @examples #' \dontrun{ #' # set the user's global user.name and user.email #' use_git_config(user.name = "Jane", user.email = "jane@example.org") #' #' # set the user.name and user.email locally, i.e. for current repo/project #' use_git_config( #' scope = "project", #' user.name = "Jane", #' user.email = "jane@example.org" #' ) #' } use_git_config <- function(scope = c("user", "project"), ...) { scope <- match.arg(scope) dots <- list2(...) stopifnot(is_dictionaryish(dots)) orig <- stats::setNames( vector(mode = "list", length = length(dots)), names(dots) ) for (i in seq_along(dots)) { nm <- names(dots)[[i]] vl <- dots[[i]] if (scope == "user") { orig[nm] <- git_cfg_get(nm, "global") %||% list(NULL) gert::git_config_global_set(nm, vl) } else { check_uses_git() orig[nm] <- git_cfg_get(nm, "local") %||% list(NULL) gert::git_config_set(nm, vl, repo = git_repo()) } } invisible(orig) } #' See or set the default Git protocol #' #' @description #' Git operations that address a remote use a so-called "transport protocol". #' usethis supports HTTPS and SSH. The protocol dictates the Git URL format used #' when usethis needs to configure the first GitHub remote for a repo: #' * `protocol = "https"` implies `https://github.com//.git` #' * `protocol = "ssh"` implies `git@@github.com:/.git` #' #' Two helper functions are available: #' * `git_protocol()` reveals the protocol "in force". As of usethis v2.0.0, #' this defaults to "https". You can change this for the duration of the #' R session with `use_git_protocol()`. Change the default for all R #' sessions with code like this in your `.Rprofile` (easily editable via #' [edit_r_profile()]): #' ``` #' options(usethis.protocol = "ssh") #' ``` #' * `use_git_protocol()` sets the Git protocol for the current R session #' #' This protocol only affects the Git URL for newly configured remotes. All #' existing Git remote URLs are always respected, whether HTTPS or SSH. 
#' #' @param protocol One of "https" or "ssh" #' #' @return The protocol, either "https" or "ssh" #' @export #' #' @examples #' \dontrun{ #' git_protocol() #' #' use_git_protocol("ssh") #' git_protocol() #' #' use_git_protocol("https") #' git_protocol() #' } git_protocol <- function() { protocol <- tolower(getOption("usethis.protocol", "unset")) if (identical(protocol, "unset")) { ui_bullets(c("i" = "Defaulting to {.val https} Git protocol.")) protocol <- "https" } else { check_protocol(protocol) } options("usethis.protocol" = protocol) getOption("usethis.protocol") } #' @rdname git_protocol #' @export use_git_protocol <- function(protocol) { options("usethis.protocol" = protocol) invisible(git_protocol()) } check_protocol <- function(protocol) { if (!is_string(protocol) || !(tolower(protocol) %in% c("https", "ssh"))) { options(usethis.protocol = NULL) ui_abort("{.arg protocol} must be either {.val https} or {.val ssh}.") } invisible() } #' Configure and report Git remotes #' #' Two helpers are available: #' * `use_git_remote()` sets the remote associated with `name` to `url`. #' * `git_remotes()` reports the configured remotes, similar to #' `git remote -v`. #' #' @param name A string giving the short name of a remote. #' @param url A string giving the url of a remote. #' @param overwrite Logical. Controls whether an existing remote can be #' modified. #' #' @return Named list of Git remotes. #' @export #' #' @examples #' \dontrun{ #' # see current remotes #' git_remotes() #' #' # add new remote named 'foo', a la `git remote add ` #' use_git_remote(name = "foo", url = "https://github.com//.git") #' #' # remove existing 'foo' remote, a la `git remote remove ` #' use_git_remote(name = "foo", url = NULL, overwrite = TRUE) #' #' # change URL of remote 'foo', a la `git remote set-url ` #' use_git_remote( #' name = "foo", #' url = "https://github.com//.git", #' overwrite = TRUE #' ) #' #' # Scenario: Fix remotes when you cloned someone's repo, but you should #' # have fork-and-cloned (in order to make a pull request). #' #' # Store origin = main repo's URL, e.g., "git@github.com:/.git" #' upstream_url <- git_remotes()[["origin"]] #' #' # IN THE BROWSER: fork the main GitHub repo and get your fork's remote URL #' my_url <- "git@github.com:/.git" #' #' # Rotate the remotes #' use_git_remote(name = "origin", url = my_url) #' use_git_remote(name = "upstream", url = upstream_url) #' git_remotes() #' #' # Scenario: Add upstream remote to a repo that you fork-and-cloned, so you #' # can pull upstream changes. #' # Note: If you fork-and-clone via `usethis::create_from_github()`, this is #' # done automatically! #' #' # Get URL of main GitHub repo, probably in the browser #' upstream_url <- "git@github.com:/.git" #' use_git_remote(name = "upstream", url = upstream_url) #' } use_git_remote <- function(name = "origin", url, overwrite = FALSE) { check_name(name) maybe_name(url) check_bool(overwrite) remotes <- git_remotes() repo <- git_repo() if (name %in% names(remotes) && !overwrite) { ui_abort(c( "Remote {.val {name}} already exists.", "Use {.code overwrite = TRUE} to edit it anyway." 
)) } if (name %in% names(remotes)) { if (is.null(url)) { gert::git_remote_remove(remote = name, repo = repo) } else { gert::git_remote_set_url(url = url, remote = name, repo = repo) } } else if (!is.null(url)) { gert::git_remote_add(url = url, name = name, repo = repo) } invisible(git_remotes()) } #' @rdname use_git_remote #' @export git_remotes <- function() { x <- gert::git_remote_list(repo = git_repo()) if (nrow(x) == 0) { return(NULL) } stats::setNames(as.list(x$url), x$name) } # unexported function to improve my personal quality of life git_clean <- function() { if (!is_interactive() || !uses_git()) { return(invisible()) } st <- gert::git_status(staged = FALSE, repo = git_repo()) paths <- st[st$status == "new", ][["file"]] n <- length(paths) if (n == 0) { ui_bullets(c("i" = "Found no untracked files.")) return(invisible()) } paths <- sort(paths) ui_paths <- map_chr(paths, ui_path_impl) ui_bullets(c( "i" = "{cli::qty(n)}There {?is/are} {n} untracked file{?s}:", bulletize(usethis_map_cli(ui_paths, template = "{.file <>}")) )) if (ui_yep( "{cli::qty(n)}Do you want to remove {?it/them}?", yes = "yes", no = "no", shuffle = FALSE)) { file_delete(paths) ui_bullets(c("v" = "{n} file{?s} deleted.")) } rstudio_git_tickle() invisible() } #' Git/GitHub sitrep #' #' Get a situation report on your current Git/GitHub status. Useful for #' diagnosing problems. The default is to report all values; provide values #' for `tool` or `scope` to be more specific. #' #' @param tool Report for __git__, or __github__ #' @param scope Report globally for the current __user__, or locally for the #' current __project__ #' #' @export #' @examples #' \dontrun{ #' # report all #' git_sitrep() #' #' # report git for current user #' git_sitrep("git", "user") #' } git_sitrep <- function(tool = c("git", "github"), scope = c("user", "project")) { tool <- rlang::arg_match(tool, multiple = TRUE) scope <- rlang::arg_match(scope, multiple = TRUE) ui_silence(try(proj_get(), silent = TRUE)) # git (global / user) -------------------------------------------------------- init_default_branch <- git_cfg_get("init.defaultBranch", where = "global") if ("git" %in% tool && "user" %in% scope) { cli::cli_h3("Git global (user)") git_user_sitrep("user") kv_line( "Global (user-level) gitignore file", I("{.path {git_ignore_path('user')}}") ) vaccinated <- git_vaccinated() kv_line("Vaccinated", vaccinated) if (!vaccinated) { ui_bullets(c("i" = "See {.fun usethis::git_vaccinate} to learn more.")) } kv_line("Default Git protocol", ui_silence(git_protocol())) kv_line("Default initial branch name", init_default_branch) } # github (global / user) ----------------------------------------------------- default_gh_host <- get_hosturl(default_api_url()) if ("github" %in% tool && "user" %in% scope) { cli::cli_h3("GitHub user") kv_line("Default GitHub host", default_gh_host) pat_sitrep(default_gh_host, scope = "user") } # git and github for active project ------------------------------------------ if (!"project" %in% scope) { return(invisible()) } if (!proj_active()) { ui_bullets(c("i" = "No active usethis project.")) return(invisible()) } cli::cli_h2("Active usethis project: {.val {proj_get()}}") if (!uses_git()) { ui_bullets(c("i" = "Active project is not a Git repo.")) return(invisible()) } # current branch ------------------------------------------------------------- branch <- tryCatch(git_branch(), error = function(e) NULL) tracking_branch <- if (is.null(branch)) NA_character_ else git_branch_tracking() if (is.null(branch)) { branch <- 
cli::format_inline(ui_special()) } else { branch <- cli::format_inline("{.val {branch}}") } if (is.na(tracking_branch)) { tracking_branch <- cli::format_inline(ui_special()) } else { tracking_branch <- cli::format_inline("{.val {tracking_branch}}") } # local git config ----------------------------------------------------------- if ("git" %in% tool) { cli::cli_h3("Git local (project)") git_user_sitrep("project") # default branch ------------------------------------------------------------- default_branch_sitrep() # vertical alignment would make this nicer, but probably not worth it ui_bullets(c( "*" = "Current local branch {cli::symbol$arrow_right} remote tracking branch:", " " = "{branch} {cli::symbol$arrow_right} {tracking_branch}" )) } # GitHub remote config ------------------------------------------------------- if ("github" %in% tool) { cli::cli_h3("GitHub project") cfg <- github_remote_config() if (cfg$type == "no_github") { ui_bullets(c("i" = "Project does not use GitHub.")) return(invisible()) } repo_host <- cfg$host_url if (!is.na(repo_host) && repo_host != default_gh_host) { cli::cli_text("Host:") kv_line("Non-default GitHub host", repo_host) pat_sitrep(repo_host, scope = "project", scold_for_renviron = FALSE) cli::cli_text("Project:") } ui_bullets(format(cfg)) } invisible() } git_user_sitrep <- function(scope = c("user", "project")) { scope <- rlang::arg_match(scope) where <- where_from_scope(scope) user <- git_user_get(where) user_local <- git_user_get("local") if (scope == "project" && !all(map_lgl(user_local, is.null))) { ui_bullets(c("i" = "This repo has a locally configured user.")) } kv_line("Name", user$name) kv_line("Email", user$email) git_user_check(user) invisible(NULL) } git_user_check <- function(user) { if (all(map_lgl(user, is.null))) { hint <- 'use_git_config(user.name = "", user.email = "")' ui_bullets(c( "x" = "Git user's name and email are not set.", "i" = "Configure using {.code {hint}}." )) return(invisible(NULL)) } if (is.null(user$name)) { hint <- 'use_git_config(user.name = "")' ui_bullets(c( "x" = "Git user's name is not set.", "i" = "Configure using {.code {hint}}." )) } if (is.null(user$email)) { hint <- 'use_git_config(user.email = "")' ui_bullets(c( "x" = "Git user's email is not set.", "i" = "Configure using {.code {hint}}." )) } } default_branch_sitrep <- function() { tryCatch( kv_line("Default branch", git_default_branch()), error_default_branch = function(e) { if (has_name(e, "db_local")) { # FYI existence of db_local implies existence of db_source ui_bullets(c( "x" = "Default branch mismatch between local repo and remote.", "i" = "The default branch of the {.val {e$db_source$name}} remote is {.val {e$db_source$default_branch}}.", "!" = "The local repo has no branch named {.val {e$db_source$default_branch}}.", "_" = "Call {.run [git_default_branch_rediscover()](usethis::git_default_branch_rediscover())} to resolve this." )) } else if (has_name(e, "db_source")) { ui_bullets(c( "x" = "Default branch mismatch between local repo and remote.", "i" = "The default branch of the {.val {e$db_source$name}} remote is {.val {e$db_source$default_branch}}.", "!" = "The local repo has no branch by that name, nor any other obvious candidates.", "_" = "Call {.run [git_default_branch_rediscover()](usethis::git_default_branch_rediscover())} to resolve this." 
)) } else { ui_bullets(c("Default branch cannot be determined.")) } } ) } # Vaccination ------------------------------------------------------------- #' Vaccinate your global gitignore file #' #' Adds `.Rproj.user`, `.Rhistory`, `.Rdata`, `.httr-oauth`, `.DS_Store`, and #' `.quarto` to your global (a.k.a. user-level) `.gitignore`. This is good #' practice as it decreases the chance that you will accidentally leak #' credentials to GitHub. `git_vaccinate()` also tries to detect and fix the #' situation where you have a global gitignore file, but it's missing from your #' global Git config. #' #' @export git_vaccinate <- function() { ensure_core_excludesFile() path <- git_ignore_path(scope = "user") if (!file_exists(path)) { ui_bullets(c( "v" = "Creating the global (user-level) gitignore: {.path {pth(path)}}" )) } write_union(path, git_ignore_lines) } git_vaccinated <- function() { path <- git_ignore_path("user") if (is.null(path) || !file_exists(path)) { return(FALSE) } # on Windows, if ~/ is present, take care to expand it the fs way lines <- read_utf8(user_path_prep(path)) all(git_ignore_lines %in% lines) } git_ignore_lines <- c( ".Rproj.user", ".Rhistory", ".Rdata", ".httr-oauth", ".DS_Store", ".quarto" ) usethis/R/description.R0000644000176200001440000001210214717524762014620 0ustar liggesusers#' Create or modify a DESCRIPTION file #' #' @description #' #' `use_description()` creates a `DESCRIPTION` file. Although mostly associated #' with R packages, a `DESCRIPTION` file can also be used to declare #' dependencies for a non-package project. Within such a project, #' `devtools::install_deps()` can then be used to install all the required #' packages. Note that, by default, `use_description()` checks for a #' CRAN-compliant package name. You can turn this off with `check_name = FALSE`. #' #' usethis consults the following sources, in this order, to set `DESCRIPTION` #' fields: #' * `fields` argument of [create_package()] or `use_description()` #' * `getOption("usethis.description")` #' * Defaults built into usethis #' #' The fields discovered via options or the usethis package can be viewed with #' `use_description_defaults()`. #' #' If you create a lot of packages, consider storing personalized defaults as a #' named list in an option named `"usethis.description"`. Here's an example of #' code to include in `.Rprofile`, which can be opened via [edit_r_profile()]: #' #' ``` #' options( #' usethis.description = list( #' "Authors@R" = utils::person( #' "Jane", "Doe", #' email = "jane@example.com", #' role = c("aut", "cre"), #' comment = c(ORCID = "YOUR-ORCID-ID") #' ), #' Language = "es", #' License = "MIT + file LICENSE" #' ) #' ) #' ``` #' #' Prior to usethis v2.0.0, `getOption("devtools.desc")` was consulted for #' backwards compatibility, but now only the `"usethis.description"` option is #' supported. #' #' @param fields A named list of fields to add to `DESCRIPTION`, potentially #' overriding default values. Default values are taken from the #' `"usethis.description"` option or the usethis package (in that order), and #' can be viewed with `use_description_defaults()`. #' @param check_name Whether to check if the name is valid for CRAN and throw an #' error if not.
#' @param roxygen If `TRUE`, sets `RoxygenNote` to current roxygen2 version #' @seealso The [description chapter](https://r-pkgs.org/description.html) #' of [R Packages](https://r-pkgs.org) #' @export #' @examples #' \dontrun{ #' use_description() #' #' use_description(fields = list(Language = "es")) #' #' use_description_defaults() #' } use_description <- function(fields = list(), check_name = TRUE, roxygen = TRUE) { name <- project_name() if (check_name) { check_package_name(name) } proj_desc_create(name = name, fields = fields, roxygen = roxygen) } #' @rdname use_description #' @param package Package name #' @export use_description_defaults <- function(package = NULL, roxygen = TRUE, fields = list()) { fields <- fields %||% list() check_is_named_list(fields) usethis <- usethis_description_defaults(package) if (roxygen) { if (is_installed("roxygen2")) { roxygen_note <- utils::packageVersion("roxygen2") } else { roxygen_note <- "7.0.0" # version doesn't really matter } usethis$Roxygen <- "list(markdown = TRUE)" usethis$RoxygenNote <- roxygen_note } options <- getOption("usethis.description") %||% list() # A `person` object in Authors@R is not patched in by modifyList() modify_this <- function(orig, patch) { out <- utils::modifyList(orig, patch) if (inherits(patch$`Authors@R`, "person")) { #if (has_name(patch, "Authors@R")) { out$`Authors@R` <- patch$`Authors@R` } out } defaults <- modify_this(usethis, options) defaults <- modify_this(defaults, fields) # Ensure each element is a single string if (inherits(defaults$`Authors@R`, "person")) { defaults$`Authors@R` <- format(defaults$`Authors@R`, style = "R") defaults$`Authors@R` <- paste0(defaults$`Authors@R`, collapse = "\n") } defaults <- lapply(defaults, paste, collapse = "") compact(defaults) } usethis_description_defaults <- function(package = NULL) { list( Package = package %||% "valid.package.name.goes.here", Version = "0.0.0.9000", Title = "What the Package Does (One Line, Title Case)", Description = "What the package does (one paragraph).", "Authors@R" = 'person("First", "Last", email = "first.last@example.com", role = c("aut", "cre"))', License = "`use_mit_license()`, `use_gpl3_license()` or friends to pick a license", Encoding = "UTF-8" ) } check_package_name <- function(name) { if (!valid_package_name(name)) { ui_abort(c( "x" = "{.val {name}} is not a valid package name. To be allowed on CRAN, it should:", "*" = "Contain only ASCII letters, numbers, and '.'.", "*" = "Have at least two characters.", "*" = "Start with a letter.", "*" = "Not end with '.'." )) } } valid_package_name <- function(x) { grepl("^[a-zA-Z][a-zA-Z0-9.]+$", x) && !grepl("\\.$", x) } tidy_desc <- function(desc) { desc$set("Encoding" = "UTF-8") # Normalize all fields (includes reordering) # Wrap in a try() so it always succeeds, even if user options are malformed try(desc$normalize(), silent = TRUE) } usethis/R/course.R0000644000176200001440000005263614717524721013610 0ustar liggesusers## see end of file for some cURL notes #' Download and unpack a ZIP file #' #' Functions to download and unpack a ZIP file into a local folder of files, #' with very intentional default behaviour. Useful in pedagogical settings or #' anytime you need a large audience to download a set of files quickly and #' actually be able to find them. The underlying helpers are documented in #' [use_course_details]. #' #' @param url Link to a ZIP file containing the materials. 
To reduce the chance #' of typos in live settings, these shorter forms are accepted: #' #' * GitHub repo spec: "OWNER/REPO". Equivalent to #' `https://github.com/OWNER/REPO/DEFAULT_BRANCH.zip`. #' * bit.ly, pos.it, or rstd.io shortlinks: "bit.ly/xxx-yyy-zzz", "pos.it/foofy" or "rstd.io/foofy". #' The instructor must then arrange for the shortlink to point to a valid #' download URL for the target ZIP file. The helper #' [create_download_url()] helps to create such URLs for GitHub, DropBox, #' and Google Drive. #' @param destdir Destination for the new folder. Defaults to the location #' stored in the global option `usethis.destdir`, if defined, or to the user's #' Desktop or similarly conspicuous place otherwise. #' @param cleanup Whether to delete the original ZIP file after unpacking its #' contents. In an interactive setting, `NA` leads to a menu where user can #' approve the deletion (or decline). #' #' @return Path to the new directory holding the unpacked ZIP file, invisibly. #' @name zip-utils #' @examples #' \dontrun{ #' # download the source of usethis from GitHub, behind a bit.ly shortlink #' use_course("bit.ly/usethis-shortlink-example") #' use_course("http://bit.ly/usethis-shortlink-example") #' #' # download the source of rematch2 package from CRAN #' use_course("https://cran.r-project.org/bin/windows/contrib/3.4/rematch2_2.0.1.zip") #' #' # download the source of rematch2 package from GitHub, 4 ways #' use_course("r-lib/rematch2") #' use_course("https://api.github.com/repos/r-lib/rematch2/zipball/HEAD") #' use_course("https://api.github.com/repos/r-lib/rematch2/zipball/main") #' use_course("https://github.com/r-lib/rematch2/archive/main.zip") #' } NULL #' @describeIn zip-utils #' #' Designed with live workshops in mind. Includes intentional friction to #' highlight the download destination. Workflow: #' * User executes, e.g., `use_course("bit.ly/xxx-yyy-zzz")`. #' * User is asked to notice and confirm the location of the new folder. Specify #' `destdir` or configure the `"usethis.destdir"` option to prevent this. #' * User is asked if they'd like to delete the ZIP file. #' * If new folder contains an `.Rproj` file, a new instance of RStudio is #' launched. Otherwise, the folder is opened in the file manager, e.g. Finder #' or File Explorer. #' @export use_course <- function(url, destdir = getOption("usethis.destdir")) { url <- normalize_url(url) destdir_not_specified <- is.null(destdir) destdir <- user_path_prep(destdir %||% conspicuous_place()) check_path_is_directory(destdir) if (destdir_not_specified && is_interactive()) { ui_bullets(c( "i" = "Downloading into {.path {pth(destdir)}}.", "_" = "Prefer a different location? Cancel, try again, and specify {.arg destdir}." )) if (ui_nah("OK to proceed?")) { ui_bullets(c(x = "Cancelling download.")) return(invisible()) } } ui_bullets(c("v" = "Downloading from {.url {url}}.")) zipfile <- tidy_download(url, destdir) ui_bullets(c("v" = "Download stored in {.path {pth(zipfile)}}.")) check_is_zip(attr(zipfile, "content-type")) tidy_unzip(zipfile, cleanup = NA) } #' @describeIn zip-utils #' #' More useful in day-to-day work. Downloads in current working directory, by #' default, and allows `cleanup` behaviour to be specified. 
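#'
#' A hedged sketch of typical non-interactive use, reusing the rematch2
#' example URL shown above (the `tempdir()` destination and `cleanup = TRUE`
#' are illustrative choices, not defaults):
#'
#' ```
#' use_zip(
#'   "https://github.com/r-lib/rematch2/archive/main.zip",
#'   destdir = tempdir(),
#'   cleanup = TRUE
#' )
#' ```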
#' @export use_zip <- function(url, destdir = getwd(), cleanup = if (rlang::is_interactive()) NA else FALSE) { url <- normalize_url(url) check_path_is_directory(destdir) ui_bullets(c("v" = "Downloading from {.url {url}}.")) zipfile <- tidy_download(url, destdir) ui_bullets(c("v" = "Download stored in {.path {pth(zipfile)}}.")) check_is_zip(attr(zipfile, "content-type")) tidy_unzip(zipfile, cleanup) } #' Helpers to download and unpack a ZIP file #' #' @description #' Details on the internal and helper functions that power [use_course()] and #' [use_zip()]. Only `create_download_url()` is exported. #' #' @name use_course_details #' @keywords internal #' @usage #' tidy_download(url, destdir = getwd()) #' tidy_unzip(zipfile, cleanup = FALSE) #' #' @aliases tidy_download tidy_unzip #' @param url A GitHub, DropBox, or Google Drive URL. #' * For `create_download_url()`: A URL copied from a web browser. #' * For `tidy_download()`: A download link for a ZIP file, possibly behind a #' shortlink or other redirect. `create_download_url()` can be helpful for #' creating this URL from typical browser URLs. #' @param destdir Path to existing local directory where the ZIP file will be #' stored. Defaults to current working directory, but note that [use_course()] #' has different default behavior. #' @param zipfile Path to local ZIP file. #' @param cleanup Whether to delete the ZIP file after unpacking. In an #' interactive session, `cleanup = NA` leads to asking the user if they #' want to delete or keep the ZIP file. #' @section tidy_download(): #' #' ``` #' # how it's used inside use_course() #' tidy_download( #' # url has been processed with internal helper normalize_url() #' url, #' # conspicuous_place() = `getOption('usethis.destdir')` or desktop or home #' # directory or working directory #' destdir = destdir %||% conspicuous_place() #' ) #' ``` #' #' Special-purpose function to download a ZIP file and automatically determine #' the file name, which often determines the folder name after unpacking. #' Developed with DropBox and GitHub as primary targets, possibly via #' shortlinks. Both platforms offer a way to download an entire folder or repo #' as a ZIP file, with information about the original folder or repo transmitted #' in the `Content-Disposition` header. In the absence of this header, a #' filename is generated from the input URL. In either case, the filename is #' sanitized. Returns the path to downloaded ZIP file, invisibly. #' #' `tidy_download()` is setup to retry after a download failure. In an #' interactive session, it asks for user's consent. All retries use a longer #' connect timeout. #' #' ## DropBox #' #' To make a folder available for ZIP download, create a shared link for it: #' * #' #' A shared link will have this form: #' ``` #' https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0 #' ``` #' Replace the `dl=0` at the end with `dl=1` to create a download link: #' ``` #' https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1 #' ``` #' You can use `create_download_url()` to do this conversion. #' #' This download link (or a shortlink that points to it) is suitable as input #' for `tidy_download()`. After one or more redirections, this link will #' eventually lead to a download URL. For more details, see #' and #' . #' #' ## GitHub #' #' Click on the repo's "Clone or download" button, to reveal a "Download ZIP" #' button. 
Capture this URL, which will have this form: #' ``` #' https://github.com/r-lib/usethis/archive/main.zip #' ``` #' This download link (or a shortlink that points to it) is suitable as input #' for `tidy_download()`. After one or more redirections, this link will #' eventually lead to a download URL. Here are other links that also lead to #' ZIP download, albeit with a different filenaming scheme (REF could be a #' branch name, a tag, or a SHA): #' ``` #' https://github.com/r-lib/usethis/zipball/HEAD #' https://api.github.com/repos/r-lib/rematch2/zipball/REF #' https://api.github.com/repos/r-lib/rematch2/zipball/HEAD #' https://api.github.com/repos/r-lib/usethis/zipball/REF #' ``` #' #' You can use `create_download_url()` to create the "Download ZIP" URL from #' a typical GitHub browser URL. #' #' ## Google Drive #' #' To our knowledge, it is not possible to download a Google Drive folder as a #' ZIP archive. It is however possible to share a ZIP file stored on Google #' Drive. To get its URL, click on "Get the shareable link" (within the "Share" #' menu). This URL doesn't allow for direct download, as it's designed to be #' processed in a web browser first. Such a sharing link looks like: #' #' ``` #' https://drive.google.com/open?id=123456789xxyyyzzz #' ``` #' #' To be able to get the URL suitable for direct download, you need to extract #' the "id" element from the URL and include it in this URL format: #' #' ``` #' https://drive.google.com/uc?export=download&id=123456789xxyyyzzz #' ``` #' #' Use `create_download_url()` to perform this transformation automatically. #' #' @section tidy_unzip(): #' #' Special-purpose function to unpack a ZIP file and (attempt to) create the #' directory structure most people want. When unpacking an archive, it is easy #' to get one more or one less level of nesting than you expected. #' #' It's especially important to finesse the directory structure here: we want #' the same local result when unzipping the same content from either GitHub or #' DropBox ZIP files, which pack things differently. Here is the intent: #' * If the ZIP archive `foo.zip` does not contain a single top-level directory, #' i.e. it is packed as "loose parts", unzip into a directory named `foo`. #' Typical of DropBox ZIP files. #' * If the ZIP archive `foo.zip` has a single top-level directory (which, by #' the way, is not necessarily called "foo"), unpack into said directory. #' Typical of GitHub ZIP files. #' #' Returns path to the directory holding the unpacked files, invisibly. #' #' **DropBox:** #' The ZIP files produced by DropBox are special. The file list tends to contain #' a spurious directory `"/"`, which we ignore during unzip. Also, if the #' directory is a Git repo and/or RStudio Project, we unzip-ignore various #' hidden files, such as `.RData`, `.Rhistory`, and those below `.git/` and #' `.Rproj.user`. #' #' @examples #' \dontrun{ #' tidy_download("https://github.com/r-lib/rematch2/archive/main.zip") #' tidy_unzip("rematch2-main.zip") #' } NULL # 1. downloads from `url` # 2. calls a retry-capable helper to download the ZIP file # 3. determines filename from content-description header (with fallbacks) # 4.
returned path has content-type and content-description as attributes tidy_download <- function(url, destdir = getwd()) { check_path_is_directory(destdir) tmp <- file_temp("tidy-download-") h <- download_url(url, destfile = tmp) cli::cat_line() cd <- content_disposition(h) base_name <- make_filename(cd, fallback = path_file(url)) full_path <- path(destdir, base_name) if (!can_overwrite(full_path)) { ui_abort(" Cancelling download, to avoid overwriting {.path {pth(full_path)}}.") } attr(full_path, "content-type") <- content_type(h) attr(full_path, "content-disposition") <- cd file_move(tmp, full_path) invisible(full_path) } download_url <- function(url, destfile, handle = curl::new_handle(), n_tries = 3, retry_connecttimeout = 40L) { handle_options <- list(noprogress = FALSE, progressfunction = progress_fun) curl::handle_setopt(handle, .list = handle_options) we_should_retry <- function(i, n_tries, status) { if (i >= n_tries) { FALSE } else if (inherits(status, "error")) { # TODO: find a way to detect a (connect) timeout more specifically? # https://github.com/jeroen/curl/issues/154 # https://ec.haxx.se/usingcurl/usingcurl-timeouts # "Failing to connect within the given time will cause curl to exit with a # timeout exit code (28)." # (however, note that all timeouts lead to this same exit code) # https://ec.haxx.se/usingcurl/usingcurl-returns # "28. Operation timeout. The specified time-out period was reached # according to the conditions. curl offers several timeouts, and this exit # code tells one of those timeout limits were reached." # https://github.com/curl/curl/blob/272282a05416e42d2cc4a847a31fd457bc6cc827/lib/strerror.c#L143-L144 # "Timeout was reached" <-- actual message we could potentially match TRUE } else { FALSE } } status <- try_download(url, destfile, handle = handle) if (inherits(status, "error") && is_interactive()) { ui_bullets(c("x" = status$message)) if (ui_nah(c( "!" = "Download failed :(", "i" = "See above for everything we know about why it failed.", " " = "Shall we try a couple more times, with a longer timeout?" ))) { n_tries <- 1 } } i <- 1 # invariant: we have made i download attempts while (we_should_retry(i, n_tries, status)) { if (i == 1) { curl::handle_setopt( handle, .list = c(connecttimeout = retry_connecttimeout) ) } i <- i + 1 ui_bullets(c("i" = "Retrying download ... 
attempt {i}.")) status <- try_download(url, destfile, handle = handle) } if (inherits(status, "error")) { stop(status) } invisible(handle) } try_download <- function(url, destfile, quiet = FALSE, mode = "wb", handle) { tryCatch( curl::curl_download( url = url, destfile = destfile, quiet = quiet, mode = mode, handle = handle ), error = function(e) e ) } tidy_unzip <- function(zipfile, cleanup = FALSE) { base_path <- path_dir(zipfile) filenames <- utils::unzip(zipfile, list = TRUE)[["Name"]] ## deal with DropBox's peculiar habit of including "/" as a file --> drop it filenames <- filenames[filenames != "/"] ## DropBox ZIP files often include lots of hidden R, RStudio, and Git files filenames <- filenames[keep_lgl(filenames)] parents <- path_before_slash(filenames) unique_parents <- unique(parents) if (length(unique_parents) == 1 && unique_parents != "") { target <- path(base_path, unique_parents) utils::unzip(zipfile, files = filenames, exdir = base_path) } else { # there is no parent; archive contains loose parts target <- path_ext_remove(zipfile) utils::unzip(zipfile, files = filenames, exdir = target) } ui_bullets(c( "v" = "Unpacking ZIP file into {.path {pth(target, base_path)}} ({length(filenames)} file{?s} extracted)." )) if (isNA(cleanup)) { cleanup <- is_interactive() && ui_yep("Shall we delete the ZIP file ({.path {pth(zipfile, base_path)}})?") } if (isTRUE(cleanup)) { ui_bullets(c("v" = "Deleting {.path {pth(zipfile, base_path)}}.")) file_delete(zipfile) } if (is_interactive()) { rproj_path <- rproj_paths(target) if (length(rproj_path) == 1 && rstudioapi::hasFun("openProject")) { ui_bullets(c("v" = "Opening project in RStudio.")) rstudioapi::openProject(target, newSession = TRUE) } else if (!in_rstudio_server()) { ui_bullets(c( "v" = "Opening {.path {pth(target, base_path)}} in the file manager." 
)) utils::browseURL(path_real(target)) } } invisible(unclass(target)) } #' @rdname use_course_details #' @examples #' # GitHub #' create_download_url("https://github.com/r-lib/usethis") #' create_download_url("https://github.com/r-lib/usethis/issues") #' #' # DropBox #' create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0") #' #' # Google Drive #' create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz") #' create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz/view") #' @export create_download_url <- function(url) { check_name(url) stopifnot(grepl("^http[s]?://", url)) switch( classify_url(url), drive = modify_drive_url(url), dropbox = modify_dropbox_url(url), github = modify_github_url(url), hopeless_url(url) ) } classify_url <- function(url) { if (grepl("drive.google.com", url)) { return("drive") } if (grepl("dropbox.com/sh", url)) { return("dropbox") } if (grepl("github.com", url)) { return("github") } "unknown" } modify_drive_url <- function(url) { # id-isolating approach taken from the gargle / googleverse id_loc <- regexpr("/d/([^/])+|/folders/([^/])+|id=([^/])+", url) if (id_loc == -1) { return(hopeless_url(url)) } id <- gsub("/d/|/folders/|id=", "", regmatches(url, id_loc)) glue_chr("https://drive.google.com/uc?export=download&id={id}") } modify_dropbox_url <- function(url) { gsub("dl=0", "dl=1", url) } modify_github_url <- function(url) { # TO CONSIDER: one could use the API for this, which might be more proper and # would work if auth is needed # https://docs.github.com/en/free-pro-team@latest/rest/reference/repos#download-a-repository-archive-zip # https://api.github.com/repos/OWNER/REPO/zipball/ # but then, in big workshop settings, we might see rate limit problems or # get blocked because of too many token-free requests from same IP parsed <- parse_github_remotes(url) glue_data_chr(parsed, "{protocol}://{host}/{repo_owner}/{repo_name}/zipball/HEAD") } hopeless_url <- function(url) { ui_bullets(c( "!" = "URL does not match a recognized form for Google Drive or DropBox; no change made." 
)) url } normalize_url <- function(url) { check_name(url) has_scheme <- grepl("^http[s]?://", url) if (has_scheme) { return(url) } if (!is_shortlink(url)) { url <- tryCatch( expand_github(url), error = function(e) url ) } paste0("https://", url) } is_shortlink <- function(url) { shortlink_hosts <- c("rstd\\.io", "bit\\.ly", "pos\\.it") any(map_lgl(shortlink_hosts, grepl, x = url)) } expand_github <- function(url) { # mostly to handle errors in the spec repo_spec <- parse_repo_spec(url) glue_data_chr(repo_spec, "github.com/{owner}/{repo}/zipball/HEAD") } conspicuous_place <- function() { destdir_opt <- getOption("usethis.destdir") if (!is.null(destdir_opt)) { return(path_tidy(destdir_opt)) } Filter(dir_exists, c( path_home("Desktop"), path_home(), path_home_r(), path_tidy(getwd()) ))[[1]] } keep_lgl <- function(file, ignores = c(".Rproj.user", ".rproj.user", ".Rhistory", ".RData", ".git", "__MACOSX", ".DS_Store")) { ignores <- paste0( "((\\/|\\A)", gsub("\\.", "[.]", ignores), "(\\/|\\Z))", collapse = "|" ) !grepl(ignores, file, perl = TRUE) } path_before_slash <- function(filepath) { f <- function(x) { parts <- strsplit(x, "/", fixed = TRUE)[[1]] if (length(parts) > 1 || grepl("/", x)) { parts[1] } else { "" } } purrr::map_chr(filepath, f) } content_type <- function(h) { headers <- curl::parse_headers_list(curl::handle_data(h)$headers) headers[["content-type"]] } content_disposition <- function(h) { headers <- curl::parse_headers_list(curl::handle_data(h)$headers) cd <- headers[["content-disposition"]] if (is.null(cd)) { return() } parse_content_disposition(cd) } check_is_zip <- function(ct) { # "https://www.fueleconomy.gov/feg/epadata/16data.zip" comes with # MIME type "application/x-zip-compressed" # see https://github.com/r-lib/usethis/issues/573 allowed <- c("application/zip", "application/x-zip-compressed") if (!ct %in% allowed) { ui_abort(c( "Download does not have MIME type {.val application/zip}.", "Instead it's {.val {ct}}." 
)) } invisible(ct) } ## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition ## https://tools.ietf.org/html/rfc6266 ## DropBox eg: "attachment; filename=\"foo.zip\"; filename*=UTF-8''foo.zip\" ## GitHub eg: "attachment; filename=foo-main.zip" # https://stackoverflow.com/questions/30193569/get-content-disposition-parameters # http://test.greenbytes.de/tech/tc2231/ parse_content_disposition <- function(cd) { if (!grepl("^attachment;", cd)) { ui_abort(c( "{.code Content-Disposition} header doesn't start with {.val attachment}.", "Actual header: {.val cd}" )) } cd <- sub("^attachment;\\s*", "", cd, ignore.case = TRUE) cd <- strsplit(cd, "\\s*;\\s*")[[1]] cd <- strsplit(cd, "=") stats::setNames( vapply(cd, `[[`, character(1), 2), vapply(cd, `[[`, character(1), 1) ) } progress_fun <- function(down, up) { total <- down[[1]] now <- down[[2]] pct <- if (length(total) && total > 0) { paste0("(", round(now / total * 100), "%)") } else { "" } if (now > 10000) { cat("\rDownloaded:", sprintf("%.2f", now / 2^20), "MB ", pct) } TRUE } make_filename <- function(cd, fallback = path_file(file_temp())) { ## TO DO(jennybc): the element named 'filename*' is preferred but I'm not ## sure how to parse it yet, so targeting 'filename' for now ## https://tools.ietf.org/html/rfc6266 cd <- cd[["filename"]] if (is.null(cd) || is.na(cd)) { check_name(fallback) return(path_sanitize(fallback)) } ## I know I could use regex and lookahead but this is easier for me to ## maintain cd <- sub("^\"(.+)\"$", "\\1", cd) path_sanitize(cd) } ## https://stackoverflow.com/questions/21322614/use-curl-to-download-a-dropbox-folder-via-shared-link-not-public-link ## lesson: if using cURL, you'd want these options ## -L, --location (follow redirects) ## -O, --remote-name (name local file like the file part of remote name) ## -J, --remote-header-name (tells -O option to consult Content-Disposition ## instead of the URL) ## https://curl.haxx.se/docs/manpage.html#OPTIONS usethis/R/sitrep.R0000644000176200001440000000744414651000165013577 0ustar liggesusers#' Report working directory and usethis/RStudio project #' #' @description `proj_sitrep()` reports #' * current working directory #' * the active usethis project #' * the active RStudio Project #' #' @description Call this function if things seem weird and you're not sure #' what's wrong or how to fix it. Usually, all three of these should coincide #' (or be unset) and `proj_sitrep()` provides suggested commands for getting #' back to this happy state. #' #' @return A named list, with S3 class `sitrep` (for printing purposes), #' reporting current working directory, active usethis project, and active #' RStudio Project #' @export #' @family project functions #' @examples #' proj_sitrep() proj_sitrep <- function() { out <- list( working_directory = getwd(), active_usethis_proj = if (proj_active()) proj_get(), active_rstudio_proj = if (rstudioapi::hasFun("getActiveProject")) { rstudioapi::getActiveProject() } ## TODO(?): address home directory to help clarify fs issues on Windows? ## home_usethis = fs::path_home(), ## home_r = normalizePath("~") ) out <- ifelse(map_lgl(out, is.null), out, as.character(path_tidy(out))) structure(out, class = "sitrep") } #' @export print.sitrep <- function(x, ...) 
{ keys <- format(names(x), justify = "right") purrr::walk2(keys, x, kv_line) rstudio_proj_is_active <- !is.null(x[["active_rstudio_proj"]]) usethis_proj_is_active <- !is.null(x[["active_usethis_proj"]]) rstudio_proj_is_not_wd <- rstudio_proj_is_active && x[["working_directory"]] != x[["active_rstudio_proj"]] usethis_proj_is_not_wd <- usethis_proj_is_active && x[["working_directory"]] != x[["active_usethis_proj"]] usethis_proj_is_not_rstudio_proj <- usethis_proj_is_active && rstudio_proj_is_active && x[["active_rstudio_proj"]] != x[["active_usethis_proj"]] if (rstudio_available() && !rstudio_proj_is_active) { ui_bullets(c( "i" = "You are working in RStudio, but are not in an RStudio Project.", "i" = "A Project-based workflow offers many advantages. Read more at:", " " = "{.url https://docs.posit.co/ide/user/ide/guide/code/projects.html}", " " = "{.url https://rstats.wtf/projects}" )) } if (!usethis_proj_is_active) { ui_bullets(c( "i" = "There is currently no active {.pkg usethis} project.", "i" = "{.pkg usethis} attempts to activate a project upon first need.", "_" = "Call {.run usethis::proj_get()} to initiate project discovery.", "_" = 'Call {.code proj_set("path/to/project")} or {.code proj_activate("path/to/project")} to provide an explicit path.' )) } if (usethis_proj_is_not_wd) { ui_bullets(c( "i" = "Your working directory is not the same as the active usethis project.", "_" = "Set working directory to the project: {.code setwd(proj_get())}.", "_" = "Set project to working directory: {.code usethis::proj_set(getwd())}." )) } if (rstudio_proj_is_not_wd) { ui_bullets(c( "i" = "Your working directory is not the same as the active RStudio Project.", "_" = "Set working directory to the Project: {.code setwd(rstudioapi::getActiveProject())}." )) } if (usethis_proj_is_not_rstudio_proj) { ui_bullets(c( "i" = "Your active RStudio Project is not the same as the active {.pkg usethis} project.", "_" = "Set active {.pkg usethis} project to RStudio Project: {.code usethis::proj_set(rstudioapi::getActiveProject())}.", "_" = "Restart RStudio in the active {.pkg usethis} project: {.code rstudioapi::openProject(usethis::proj_get())}.", "_" = "Open the active {.pkg usethis} project in a new instance of RStudio: {.code usethis::proj_activate(usethis::proj_get())}." )) } invisible(x) } usethis/R/pr.R0000644000176200001440000010750114717524762012726 0ustar liggesusers#' Helpers for GitHub pull requests #' #' @description #' The `pr_*` family of functions is designed to make working with GitHub pull #' requests (PRs) as painless as possible for both contributors and package #' maintainers. #' #' To use the `pr_*` functions, your project must be a Git repo and have one of #' these GitHub remote configurations: #' * "ours": You can push to the GitHub remote configured as `origin` and it's #' not a fork. #' * "fork": You can push to the GitHub remote configured as `origin`, it's a #' fork, and its parent is configured as `upstream`. `origin` points to your #' **personal** copy and `upstream` points to the **source repo**. #' #' "Ours" and "fork" are two of several GitHub remote configurations examined in #' [Common remote setups](https://happygitwithr.com/common-remote-setups.html) #' in Happy Git and GitHub for the useR. #' #' The [Pull Request #' Helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html) #' article walks through the process of making a pull request with the `pr_*` #' functions. 
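#'
#' As a minimal sketch of that workflow (the `"OWNER/REPO"` spec and the branch
#' name below are placeholders), a typical contributor round trip looks
#' something like this:
#'
#' ```
#' create_from_github("OWNER/REPO", fork = TRUE)  # fork and clone the source repo
#' pr_init("my-fix")                              # create a feature branch for the PR
#' # ... edit files, commit with Git as usual ...
#' pr_push()                                      # push the branch and open the PR in the browser
#' pr_finish()                                    # clean up locally once the PR is merged
#' ```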
#'
#' The `pr_*` functions also use your Git/GitHub credentials to carry out
#' various remote operations; see below for more about auth. The `pr_*`
#' functions also proactively check for agreement re: the default branch in your
#' local repo and the source repo. See [git_default_branch()] for more.
#'
#' @template double-auth
#'
#' @section For contributors:
#' To contribute to a package, first use `create_from_github("OWNER/REPO")`.
#' This forks the source repository and checks out a local copy.
#'
#' Next use `pr_init()` to create a branch for your PR. It is best practice to
#' never make commits to the default branch of a fork (usually named
#' `main` or `master`), because you do not own it. A pull request should always
#' come from a feature branch. It will be much easier to pull upstream changes
#' from the fork parent if you only allow yourself to work in feature branches.
#' It is also much easier for a maintainer to explore and extend your PR if you
#' create a feature branch.
#'
#' Work locally, in your branch, making changes to files, and committing your
#' work. Once you're ready to create the PR, run `pr_push()` to push your local
#' branch to GitHub, and open a webpage that lets you initiate the PR (or draft
#' PR).
#'
#' To learn more about the process of making a pull request, read the [Pull
#' Request
#' Helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html)
#' vignette.
#'
#' If you are lucky, your PR will be perfect, and the maintainer will accept it.
#' You can then run `pr_finish()` to delete your PR branch. In most cases,
#' however, the maintainer will ask you to make some changes. Make the changes,
#' then run `pr_push()` to update your PR.
#'
#' It's also possible that the maintainer will contribute some code to your PR:
#' to get those changes back onto your computer, run `pr_pull()`. It can also
#' happen that other changes have occurred in the package since you first
#' created your PR. You might need to merge the default branch (usually named
#' `main` or `master`) into your PR branch. Do that by running
#' `pr_merge_main()`: this makes sure that your PR is compatible with the
#' primary repo's main line of development. Both `pr_pull()` and
#' `pr_merge_main()` can result in merge conflicts, so be prepared to resolve
#' them before continuing.
#'
#' @section For maintainers:
#' To download a PR locally so that you can experiment with it, run
#' `pr_fetch()` and select the PR or, if you already know its number, call
#' `pr_fetch()` with that number. If you make changes, run `pr_push()` to push
#' them back to GitHub. After you have merged the PR, run `pr_finish()` to
#' delete the local branch and remove the remote associated with the
#' contributor's fork.
#'
#' @section Overview of all the functions:
#' * `pr_init()`: As a contributor, start work on a new PR by ensuring that
#' your local repo is up-to-date, then creating and checking out a new branch.
#' Nothing is pushed to or created on GitHub until you call `pr_push()`.
#' * `pr_fetch()`: As a maintainer, review or contribute changes to an existing
#' PR by creating a local branch that tracks the remote PR. `pr_fetch()` does as
#' little work as possible, so you can also use it to resume work on a PR that
#' already has a local branch (where it will also ensure your local branch is
#' up-to-date). If called with no arguments, up to 9 open PRs are offered for
#' interactive selection.
#' * `pr_resume()`: Resume work on a PR by switching to an existing local branch #' and pulling any changes from its upstream tracking branch, if it has one. If #' called with no arguments, up to 9 local branches are offered for interactive #' selection, with a preference for branches connected to PRs and for branches #' with recent activity. #' * `pr_push()`: The first time it's called, a PR branch is pushed to GitHub #' and you're taken to a webpage where a new PR (or draft PR) can be created. #' This also sets up the local branch to track its remote counterpart. #' Subsequent calls to `pr_push()` make sure the local branch has all the remote #' changes and, if so, pushes local changes, thereby updating the PR. #' * `pr_pull()`: Pulls changes from the local branch's remote tracking branch. #' If a maintainer has extended your PR, this is how you bring those changes #' back into your local work. #' * `pr_merge_main()`: Pulls changes from the default branch of the source repo #' into the current local branch. This can be used when the local branch is the #' default branch or when it's a PR branch. #' * `pr_pause()`: Makes sure you're up-to-date with any remote changes in the #' PR. Then switches back to the default branch and pulls from the source repo. #' Use `pr_resume()` with name of branch or use `pr_fetch()` to resume using PR #' number. #' * `pr_view()`: Visits the PR associated with the current branch in the #' browser (default) or the specific PR identified by `number`. #' (FYI [browse_github_pulls()] is a handy way to visit the list of all PRs for #' the current project.) #' * `pr_forget()`: Does local clean up when the current branch is an actual or #' notional PR that you want to abandon. Maybe you initiated it yourself, via #' `pr_init()`, or you used `pr_fetch()` to explore a PR from GitHub. Only does #' *local* operations: does not update or delete any remote branches, nor does #' it close any PRs. Alerts the user to any uncommitted or unpushed work that is #' at risk of being lost. If user chooses to proceed, switches back to the #' default branch, pulls changes from source repo, and deletes local PR branch. #' Any associated Git remote is deleted, if the "forgotten" PR was the only #' branch using it. #' * `pr_finish()`: Does post-PR clean up, but does NOT actually merge or close #' a PR (maintainer should do this in the browser). If `number` is not given, #' infers the PR from the upstream tracking branch of the current branch. If #' `number` is given, it does not matter whether the PR exists locally. If PR #' exists locally, alerts the user to uncommitted or unpushed changes, then #' switches back to the default branch, pulls changes from source repo, and #' deletes local PR branch. If the PR came from an external fork, any associated #' Git remote is deleted, provided it's not in use by any other local branches. #' If the PR has been merged and user has permission, deletes the remote branch #' (this is the only remote operation that `pr_finish()` potentially does). #' #' @name pull-requests NULL #' @export #' @rdname pull-requests #' @param branch Name of a new or existing local branch. If creating a new #' branch, note this should usually consist of lower case letters, numbers, #' and `-`. pr_init <- function(branch) { check_string(branch) repo <- git_repo() if (gert::git_branch_exists(branch, local = TRUE, repo = repo)) { code <- glue('pr_resume("{branch}")') ui_bullets(c( "i" = "Branch {.val {branch}} already exists, calling {.code {code}}." 
)) return(pr_resume(branch)) } # don't absolutely require PAT success, because we could be offline # or in another salvageable situation, e.g. need to configure PAT cfg <- github_remote_config(github_get = NA) check_for_bad_config(cfg) tr <- target_repo(cfg, ask = FALSE) online <- is_online(tr$host) if (!online) { ui_bullets(c( "x" = "You are not currently online.", "i" = "You can still create a local branch, but we can't check that your current branch is up-to-date or setup the remote branch." )) if (ui_nah("Do you want to continue?")) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } else { maybe_good_configs <- c("maybe_ours_or_theirs", "maybe_fork") if (cfg$type %in% maybe_good_configs) { ui_bullets(c( "x" = 'Unable to confirm the GitHub remote configuration is "pull request ready".', "i" = "You probably need to configure a personal access token for {.val {tr$host}}.", "i" = "See {.run usethis::gh_token_help()} for help with that." )) if (ui_github_remote_config_wat(cfg)) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } } default_branch <- if (online) git_default_branch_(cfg) else guess_local_default_branch() challenge_non_default_branch( "Are you sure you want to create a PR branch based on a non-default branch?", default_branch = default_branch ) if (online) { # this is not pr_pull_source_override() because: # a) we may NOT be on default branch (although we probably are) # b) we didn't just switch to the branch we're on, therefore we have to # consider that the pull may be affected by uncommitted changes or a # merge current_branch <- git_branch() if (current_branch == default_branch) { # override for mis-configured forks, that have default branch tracking # the fork (origin) instead of the source (upstream) remref <- glue("{tr$remote}/{default_branch}") } else { remref <- git_branch_tracking(current_branch) } if (!is.na(remref)) { comparison <- git_branch_compare(current_branch, remref) if (comparison$remote_only > 0) { challenge_uncommitted_changes() } ui_bullets(c("v" = "Pulling changes from {.val {remref}}.")) git_pull(remref = remref, verbose = FALSE) } } ui_bullets(c("v" = "Creating and switching to local branch {.val {branch}}.")) gert::git_branch_create(branch, repo = repo) config_key <- glue("branch.{branch}.created-by") gert::git_config_set(config_key, value = "usethis::pr_init", repo = repo) ui_bullets(c("_" = "Use {.run usethis::pr_push()} to create a PR.")) invisible() } #' @export #' @rdname pull-requests pr_resume <- function(branch = NULL) { repo <- git_repo() if (is.null(branch)) { ui_bullets(c( "i" = "No branch specified ... looking up local branches and associated PRs." )) default_branch <- guess_local_default_branch() branch <- choose_branch(exclude = default_branch) if (is.null(branch)) { ui_bullets(c("x" = "Repo doesn't seem to have any non-default branches.")) return(invisible()) } if (length(branch) == 0) { ui_bullets(c("x" = "No branch selected, exiting.")) return(invisible()) } } check_string(branch) if (!gert::git_branch_exists(branch, local = TRUE, repo = repo)) { code <- glue('usethis::pr_init("{branch}")') ui_abort(c( "x" = "No branch named {.val {branch}} exists.", "_" = "Call {.run {code}} to create a new PR branch." )) } challenge_uncommitted_changes() ui_bullets(c("v" = "Switching to branch {.val {branch}}.")) gert::git_branch_checkout(branch, repo = repo) git_pull() ui_bullets(c("_" = "Use {.run usethis::pr_push()} to create or update PR.")) invisible() } #' @export #' @rdname pull-requests #' @param number Number of PR. 
#' @param target Which repo to target? This is only a question in the case of a #' fork. In a fork, there is some slim chance that you want to consider pull #' requests against your fork (the primary repo, i.e. `origin`) instead of #' those against the source repo (i.e. `upstream`, which is the default). #' #' @examples #' \dontrun{ #' pr_fetch(123) #' } pr_fetch <- function(number = NULL, target = c("source", "primary")) { repo <- git_repo() tr <- target_repo(github_get = NA, role = target, ask = FALSE) challenge_uncommitted_changes() if (is.null(number)) { ui_bullets(c("i" = "No PR specified ... looking up open PRs.")) pr <- choose_pr(tr = tr) if (is.null(pr)) { ui_bullets(c("x" = "No open PRs found for {.val {tr$repo_spec}}.")) return(invisible()) } if (min(lengths(pr)) == 0) { ui_bullets(c("x" = "No PR selected, exiting.")) return(invisible()) } } else { pr <- pr_get(number = number, tr = tr) } if (is.na(pr$pr_repo_owner)) { ui_abort(" The repo or branch where {.href [PR #{pr$pr_number}]({pr$pr_html_url})} originates seems to have been deleted.") } pr_user <- glue("@{pr$pr_user}") ui_bullets(c( "v" = "Checking out PR {.val {pr$pr_string}} ({.field {pr_user}}): {.val {pr$pr_title}}." )) if (pr$pr_from_fork && isFALSE(pr$maintainer_can_modify)) { ui_bullets(c( "!" = "Note that user does NOT allow maintainer to modify this PR at this time, although this can be changed." )) } remote <- github_remote_list(pr$pr_remote) if (nrow(remote) == 0) { url <- switch(tr$protocol, https = pr$pr_https_url, ssh = pr$pr_ssh_url) ui_bullets(c("v" = "Adding remote {.val {pr$pr_remote}} as {.val {url}}.")) gert::git_remote_add(url = url, name = pr$pr_remote, repo = repo) config_key <- glue("remote.{pr$pr_remote}.created-by") gert::git_config_set(config_key, "usethis::pr_fetch", repo = repo) } pr_remref <- glue_data(pr, "{pr_remote}/{pr_ref}") gert::git_fetch( remote = pr$pr_remote, refspec = pr$pr_ref, repo = repo, verbose = FALSE ) if (is.na(pr$pr_local_branch)) { pr$pr_local_branch <- if (pr$pr_from_fork) sub(":", "-", pr$pr_label) else pr$pr_ref } # Create local branch, if necessary, and switch to it ---- if (!gert::git_branch_exists(pr$pr_local_branch, local = TRUE, repo = repo)) { ui_bullets(c( "v" = "Creating and switching to local branch {.val {pr$pr_local_branch}}.", "v" = "Setting {.val {pr_remref}} as remote tracking branch." 
)) gert::git_branch_create(pr$pr_local_branch, ref = pr_remref, repo = repo) config_key <- glue("branch.{pr$pr_local_branch}.created-by") gert::git_config_set(config_key, "usethis::pr_fetch", repo = repo) config_url <- glue("branch.{pr$pr_local_branch}.pr-url") gert::git_config_set(config_url, pr$pr_html_url, repo = repo) return(invisible()) } # Local branch pre-existed; make sure tracking branch is set, switch, & pull ui_bullets(c("v" = "Switching to branch {.val {pr$pr_local_branch}}.")) gert::git_branch_checkout(pr$pr_local_branch, repo = repo) config_url <- glue("branch.{pr$pr_local_branch}.pr-url") gert::git_config_set(config_url, pr$pr_html_url, repo = repo) pr_branch_ours_tracking <- git_branch_tracking(pr$pr_local_branch) if (is.na(pr_branch_ours_tracking) || pr_branch_ours_tracking != pr_remref) { ui_bullets(c("v" = "Setting {.val {pr_remref}} as remote tracking branch.")) gert::git_branch_set_upstream(pr_remref, repo = repo) } git_pull(verbose = FALSE) } #' @export #' @rdname pull-requests pr_push <- function() { repo <- git_repo() cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg, ok_configs = c("ours", "fork")) default_branch <- git_default_branch_(cfg) check_pr_branch(default_branch) challenge_uncommitted_changes() branch <- git_branch() remref <- git_branch_tracking(branch) if (is.na(remref)) { # this is the first push if (cfg$type == "fork" && cfg$upstream$can_push && is_interactive()) { choices <- c( origin = ui_pre_glue(" <> = {.val origin} (external PR)"), upstream = ui_pre_glue(" <> = {.val upstream} (internal PR)") ) choices_formatted <- map_chr(choices, cli::format_inline) title <- glue("Which repo do you want to push to?") choice <- utils::menu(choices_formatted, graphics = FALSE, title = title) remote <- names(choices)[[choice]] } else { remote <- "origin" } git_push_first(branch, remote) } else { check_branch_pulled(use = "pr_pull()") git_push(branch, remref) } # Prompt to create PR if does not exist yet tr <- target_repo(cfg, ask = FALSE) pr <- pr_find(branch, tr = tr) if (is.null(pr)) { pr_create() } else { ui_bullets(c( "_" = "View PR at {.url {pr$pr_html_url}} or call {.run usethis::pr_view()}." )) } invisible() } #' @export #' @rdname pull-requests pr_pull <- function() { cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg) default_branch <- git_default_branch_(cfg) check_pr_branch(default_branch) challenge_uncommitted_changes() git_pull() # note associated PR in git config, if applicable tr <- target_repo(cfg, ask = FALSE) pr_find(tr = tr) invisible(TRUE) } #' @export #' @rdname pull-requests pr_merge_main <- function() { tr <- target_repo(github_get = TRUE, ask = FALSE) challenge_uncommitted_changes() remref <- glue("{tr$remote}/{tr$default_branch}") ui_bullets(c("v" = "Pulling changes from {.val {remref}}.")) git_pull(remref, verbose = FALSE) } #' @export #' @rdname pull-requests pr_view <- function(number = NULL, target = c("source", "primary")) { cfg <- github_remote_config(github_get = NA) tr <- target_repo(cfg, github_get = NA, role = target, ask = FALSE) url <- NULL if (is.null(number)) { branch <- git_branch() default_branch <- git_default_branch_(cfg) if (branch != default_branch) { url <- pr_url(branch = branch, tr = tr) if (is.null(url)) { ui_bullets(c( "i" = "Current branch ({.val {branch}}) does not appear to be connected to a PR." )) } else { number <- sub("^.+pull/", "", url) ui_bullets(c( "i" = "Current branch ({.val {branch}}) is connected to PR #{number}." 
)) } } } else { pr <- pr_get(number = number, tr = tr) url <- pr$pr_html_url } if (is.null(url)) { ui_bullets(c("i" = "No PR specified ... looking up open PRs.")) pr <- choose_pr(tr = tr) if (is.null(pr)) { ui_bullets(c("x" = "No open PRs found for {.val {tr$repo_spec}}.")) return(invisible()) } if (min(lengths(pr)) == 0) { ui_bullets(c("x" = "No PR selected, exiting.")) return(invisible()) } url <- pr$pr_html_url } view_url(url) } #' @export #' @rdname pull-requests pr_pause <- function() { cfg <- github_remote_config(github_get = NA) tr <- target_repo(cfg, github_get = NA, ask = FALSE) ui_bullets(c("v" = "Switching back to the default branch.")) default_branch <- git_default_branch_(cfg) if (git_branch() == default_branch) { ui_bullets(c( "!" = "Already on this repo's default branch ({.val {default_branch}}), nothing to do." )) return(invisible()) } challenge_uncommitted_changes() # TODO: what happens here if offline? check_branch_pulled(use = "pr_pull()") ui_bullets(c( "v" = "Switching back to default branch ({.val {default_branch}})." )) gert::git_branch_checkout(default_branch, repo = git_repo()) pr_pull_source_override(tr = tr, default_branch = default_branch) } #' @export #' @rdname pull-requests pr_finish <- function(number = NULL, target = c("source", "primary")) { pr_clean(number = number, target = target, mode = "finish") } #' @export #' @rdname pull-requests pr_forget <- function() pr_clean(mode = "forget") # unexported helpers ---- # Removes local evidence of PRs that you're done with or wish you'd never # started or fetched # Only possible remote action is to delete the remote branch for a merged PR pr_clean <- function(number = NULL, target = c("source", "primary"), mode = c("finish", "forget")) { withr::defer(rstudio_git_tickle()) mode <- match.arg(mode) repo <- git_repo() cfg <- github_remote_config(github_get = NA) tr <- target_repo(cfg, github_get = NA, role = target, ask = FALSE) default_branch <- git_default_branch_(cfg) if (is.null(number)) { check_pr_branch(default_branch) pr <- pr_find(git_branch(), tr = tr, state = "all") # if the remote branch has already been deleted (probably post-merge), we # can't always reverse engineer what the corresponding local branch was, but # we already know it -- it's how we found the PR in the first place! if (!is.null(pr)) { pr$pr_local_branch <- pr$pr_local_branch %|% git_branch() } } else { pr <- pr_get(number = number, tr = tr) } pr_local_branch <- if (is.null(pr)) git_branch() else pr$pr_local_branch if (!is.na(pr_local_branch)) { if (pr_local_branch == git_branch()) { challenge_uncommitted_changes() } tracking_branch <- git_branch_tracking(pr_local_branch) if (is.na(tracking_branch)) { if (ui_nah(c( "!" = "Local branch {.val {pr_local_branch}} has no associated remote branch.", "i" = "If we delete {.val {pr_local_branch}}, any work that exists only on this branch may be hard for you to recover.", " " = "Proceed anyway?" ))) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } else { cmp <- git_branch_compare( branch = pr_local_branch, remref = tracking_branch ) if (cmp$local_only > 0 && ui_nah(c( "!" = "Local branch {.val {pr_local_branch}} has 1 or more commits that have not been pushed to {.val {tracking_branch}}.", "i" = "If we delete {.val {pr_local_branch}}, this work may be hard for you to recover.", " " = "Proceed anyway?" ))) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } } if (git_branch() != default_branch) { ui_bullets(c( "v" = "Switching back to default branch ({.val {default_branch}})." 
)) gert::git_branch_checkout(default_branch, force = TRUE, repo = repo) pr_pull_source_override(tr = tr, default_branch = default_branch) } if (!is.na(pr_local_branch)) { ui_bullets(c( "v" = "Deleting local {.val {pr_local_branch}} branch." )) gert::git_branch_delete(pr_local_branch, repo = repo) } if (is.null(pr)) { return(invisible()) } if (mode == "finish") { pr_branch_delete(pr) } # delete remote, if we (usethis) added it AND no remaining tracking branches created_by <- git_cfg_get(glue("remote.{pr$pr_remote}.created-by")) if (is.null(created_by) || !grepl("^usethis::", created_by)) { return(invisible()) } branches <- gert::git_branch_list(local = TRUE, repo = repo) branches <- branches[!is.na(branches$upstream), ] if (sum(grepl(glue("^refs/remotes/{pr$pr_remote}"), branches$upstream)) == 0) { ui_bullets(c("v" = "Removing remote {.val {pr$pr_remote}}.")) gert::git_remote_remove(remote = pr$pr_remote, repo = repo) } invisible() } # Make sure to pull from upstream/DEFAULT (as opposed to origin/DEFAULT) if # we're in DEFAULT branch of a fork. I wish everyone set up DEFAULT to track the # DEFAULT branch in the source repo, but this protects us against sub-optimal # setup. pr_pull_source_override <- function(tr, default_branch) { # TODO: why does this not use a check_*() function, i.e. shared helper? # I guess to issue a specific error message? current_branch <- git_branch() if (current_branch != default_branch) { ui_abort(" Internal error: {.fun pr_pull_source_override} should only be used when on default branch.") } # guard against mis-configured forks, that have default branch tracking # the fork (origin) instead of the source (upstream) # TODO: should I just change the upstream tracking branch, i.e. fix it? remref <- glue("{tr$remote}/{default_branch}") if (is_online(tr$host)) { ui_bullets(c("v" = "Pulling changes from {.val {remref}}.")) git_pull(remref = remref, verbose = FALSE) } else { ui_bullets(c( "!" = "Can't reach {.val {tr$host}}, therefore unable to pull changes from {.val {remref}}." )) } } pr_create <- function() { branch <- git_branch() tracking_branch <- git_branch_tracking(branch) remote <- remref_remote(tracking_branch) remote_dat <- github_remotes(remote, github_get = FALSE) ui_bullets(c("_" = "Create PR at link given below.")) view_url(glue_data(remote_dat, "{host_url}/{repo_spec}/compare/{branch}")) } # retrieves 1 PR, if: # * we can establish a tracking relationship between `branch` and a PR branch # * we can get the user to choose 1 pr_find <- function(branch = git_branch(), tr = NULL, state = c("open", "closed", "all")) { # Have we done this before? Check if we've cached pr-url in git config. config_url <- glue("branch.{branch}.pr-url") url <- git_cfg_get(config_url, where = "local") if (!is.null(url)) { return(pr_get(number = sub("^.+pull/", "", url), tr = tr)) } tracking_branch <- git_branch_tracking(branch) if (is.na(tracking_branch)) { return(NULL) } state <- match.arg(state) remote <- remref_remote(tracking_branch) remote_dat <- github_remotes(remote) pr_head <- glue("{remote_dat$repo_owner}:{remref_branch(tracking_branch)}") pr_dat <- pr_list(tr = tr, state = state, head = pr_head) if (nrow(pr_dat) == 0) { return(NULL) } if (nrow(pr_dat) > 1) { spec <- sub(":", "/", pr_head) ui_bullets(c("!" 
= "Multiple PRs are associated with {.val {spec}}.")) pr_dat <- choose_pr(pr_dat = pr_dat) if (min(lengths(pr_dat)) == 0) { ui_abort(" One of these PRs must be specified explicitly or interactively: \\ {.or {paste0('#', pr_dat$pr_number)}}.") } } gert::git_config_set(config_url, pr_dat$pr_html_url, repo = git_repo()) as.list(pr_dat) } pr_url <- function(branch = git_branch(), tr = NULL, state = c("open", "closed", "all")) { state <- match.arg(state) pr <- pr_find(branch, tr = tr, state = state) if (is.null(pr)) { NULL } else { pr$pr_html_url } } pr_data_tidy <- function(pr) { out <- list( pr_number = pluck_int(pr, "number"), pr_title = pluck_chr(pr, "title"), pr_state = pluck_chr(pr, "state"), pr_user = pluck_chr(pr, "user", "login"), pr_created_at = pluck_chr(pr, "created_at"), pr_updated_at = pluck_chr(pr, "updated_at"), pr_merged_at = pluck_chr(pr, "merged_at"), pr_label = pluck_chr(pr, "head", "label"), # the 'repo' element of 'head' is NULL when fork has been deleted pr_repo_owner = pluck_chr(pr, "head", "repo", "owner", "login"), pr_ref = pluck_chr(pr, "head", "ref"), pr_repo_spec = pluck_chr(pr, "head", "repo", "full_name"), pr_from_fork = pluck_lgl(pr, "head", "repo", "fork"), # 'maintainer_can_modify' is only present when we GET one specific PR pr_maintainer_can_modify = pluck_lgl(pr, "maintainer_can_modify"), pr_https_url = pluck_chr(pr, "head", "repo", "clone_url"), pr_ssh_url = pluck_chr(pr, "head", "repo", "ssh_url"), pr_html_url = pluck_chr(pr, "html_url"), pr_string = glue(" {pluck_chr(pr, 'base', 'repo', 'full_name')}/#{pluck_int(pr, 'number')}") ) grl <- github_remote_list(these = NULL) m <- match(out$pr_repo_spec, grl$repo_spec) out$pr_remote <- if (is.na(m)) out$pr_repo_owner else grl$remote[m] pr_remref <- glue("{out$pr_remote}/{out$pr_ref}") gbl <- gert::git_branch_list(local = TRUE, repo = git_repo()) gbl <- gbl[!is.na(gbl$upstream), c("name", "upstream")] gbl$upstream <- sub("^refs/remotes/", "", gbl$upstream) m <- match(pr_remref, gbl$upstream) out$pr_local_branch <- if (is.na(m)) NA_character_ else gbl$name[m] # If the fork has been deleted, these are all NA # - Because pr$head$repo is NULL: # pr_repo_owner, pr_repo_spec, pr_from_fork, pr_https_url, pr_ssh_url # - Because derived from those above: # pr_remote, pr_remref pr_local_branch # I suppose one could already have a local branch, if you fetched the PR # before the fork got deleted. # But an initial pr_fetch() won't work if the fork has been deleted. # I'm willing to accept that the pr_*() functions don't necessarily address # the "deleted fork" scenario. It's relatively rare. 
# example: https://github.com/r-lib/httr/pull/634 out } pr_list <- function(tr = NULL, github_get = NA, state = c("open", "closed", "all"), head = NULL) { tr <- tr %||% target_repo(github_get = github_get, ask = FALSE) state <- match.arg(state) gh <- gh_tr(tr) safely_gh <- purrr::safely(gh, otherwise = NULL) out <- safely_gh( "GET /repos/{owner}/{repo}/pulls", state = state, head = head, .limit = Inf ) if (is.null(out$error)) { prs <- out$result } else { ui_bullets(c("x" = "Unable to retrieve PRs for {.value {tr$repo_spec}}.")) prs <- NULL } no_prs <- length(prs) == 0 if (no_prs) { prs <- list(list()) } out <- map(prs, pr_data_tidy) out <- map(out, ~ as.data.frame(.x, stringsAsFactors = FALSE)) out <- do.call(rbind, out) if (no_prs) { out[0, ] } else { pr_is_open <- out$pr_state == "open" rbind(out[pr_is_open, ], out[!pr_is_open, ]) } } # retrieves specific PR by number pr_get <- function(number, tr = NULL, github_get = NA) { tr <- tr %||% target_repo(github_get = github_get, ask = FALSE) gh <- gh_tr(tr) raw <- gh("GET /repos/{owner}/{repo}/pulls/{number}", number = number) pr_data_tidy(raw) } branches_with_no_upstream_or_github_upstream <- function(tr = NULL) { repo <- git_repo() gb_dat <- gert::git_branch_list(local = TRUE, repo = repo) gb_dat <- gb_dat[, c("name", "upstream", "updated")] gb_dat$remref <- sub("^refs/remotes/", "", gb_dat$upstream) gb_dat$upstream <- NULL gb_dat$remote <- remref_remote(gb_dat$remref) gb_dat$ref <- remref_branch(gb_dat$remref) gb_dat$cfg_pr_url <- map_chr( glue("branch.{gb_dat$name}.pr-url"), ~ git_cfg_get(.x, where = "local") %||% NA_character_ ) ghr <- github_remote_list(these = NULL)[["remote"]] gb_dat <- gb_dat[is.na(gb_dat$remref) | (gb_dat$remote %in% ghr), ] pr_dat <- pr_list(tr = tr) dat <- merge( x = gb_dat, y = pr_dat, by.x = "name", by.y = "pr_local_branch", all.x = TRUE ) dat <- dat[order(dat$pr_number, dat$pr_updated_at, dat$updated, decreasing = TRUE), ] missing_cfg <- is.na(dat$cfg_pr_url) & !is.na(dat$pr_html_url) purrr::walk2( glue("branch.{dat$name[missing_cfg]}.pr-url"), dat$pr_html_url[missing_cfg], ~ gert::git_config_set(.x, .y, repo = repo) ) dat } choose_branch <- function(exclude = character()) { if (!is_interactive()) { return(character()) } dat <- branches_with_no_upstream_or_github_upstream() dat <- dat[!dat$name %in% exclude, ] if (nrow(dat) == 0) { return() } prompt <- "Which branch do you want to checkout? 
(0 to exit)" n_show_max <- 9 n <- nrow(dat) n_shown <- compute_n_show(n, n_show_nominal = n_show_max) n_not_shown <- n - n_shown if (n_not_shown > 0) { branches_not_shown <- utils::tail(dat$name, -n_shown) dat <- dat[seq_len(n_shown), ] fine_print <- cli::format_inline( "{n_not_shown} branch{?/es} not listed: {.val {branches_not_shown}}" ) prompt <- glue("{prompt}\n{fine_print}") } dat$pretty_name <- format(dat$name, justify = "right") dat_pretty <- purrr::pmap_chr( dat[c("pretty_name", "pr_number", "pr_html_url", "pr_user", "pr_title")], function(pretty_name, pr_number, pr_html_url, pr_user, pr_title) { if (is.na(pr_number)) { pretty_name } else { href_number <- ui_pre_glue("{.href [PR #<>](<>)}") at_user <- glue("@{pr_user}") template <- ui_pre_glue( "{pretty_name} {cli::symbol$arrow_right} <> ({.field <>}): {.val <>}" ) cli::format_inline(template) } } ) choice <- utils::menu(title = prompt, choices = cli::ansi_strtrim(dat_pretty)) dat$name[choice] } choose_pr <- function(tr = NULL, pr_dat = NULL) { if (!is_interactive()) { return(list(pr_number = list())) } if (is.null(pr_dat)) { tr <- tr %||% target_repo() pr_dat <- pr_list(tr) } if (nrow(pr_dat) == 0) { return() } # wording needs to make sense for several PR-choosing tasks, e.g. fetch, view, # finish, forget prompt <- "Which PR are you interested in? (0 to exit)" n_show_max <- 9 n <- nrow(pr_dat) n_shown <- compute_n_show(n, n_show_nominal = n_show_max) n_not_shown <- n - n_shown if (n_not_shown > 0) { pr_dat <- pr_dat[seq_len(n_shown), ] info1 <- cli::format_inline("Not shown: {n_not_shown} more PR{?s}.") info2 <- cli::format_inline( "Call {.run usethis::browse_github_pulls()} to browse all PRs." ) prompt <- glue("{prompt}\n{info1}\n{info2}") } some_closed <- any(pr_dat$pr_state == "closed") pr_pretty <- purrr::pmap_chr( pr_dat[c("pr_number", "pr_html_url", "pr_user", "pr_state", "pr_title")], function(pr_number, pr_html_url, pr_user, pr_state, pr_title) { href_number <- ui_pre_glue("{.href [PR #<>](<>)}") at_user <- glue("@{pr_user}") if (some_closed) { template <- ui_pre_glue( "<> ({.field <>}, {pr_state}): {.val <>}" ) cli::format_inline(template) } else { template <- ui_pre_glue( "<> ({.field <>}): {.val <>}" ) cli::format_inline(template) } } ) choice <- utils::menu( title = prompt, choices = cli::ansi_strtrim(pr_pretty) ) as.list(pr_dat[choice, ]) } # deletes the remote branch associated with a PR # returns invisible TRUE/FALSE re: whether a deletion actually occurred # reasons this returns FALSE # * don't have push permission on remote where PR branch lives # * PR has not been merged # * remote branch has already been deleted pr_branch_delete <- function(pr) { remote <- pr$pr_remote remote_dat <- github_remotes(remote) if (!isTRUE(remote_dat$can_push)) { return(invisible(FALSE)) } gh <- gh_tr(remote_dat) pr_ref <- tryCatch( gh( "GET /repos/{owner}/{repo}/git/ref/{ref}", ref = glue("heads/{pr$pr_ref}") ), http_error_404 = function(cnd) NULL ) pr_remref <- glue_data(pr, "{pr_remote}/{pr_ref}") if (is.null(pr_ref)) { ui_bullets(c( "i" = "PR {.href [{pr$pr_string}]({pr$pr_html_url})} originated from branch {.val {pr_remref}}, which no longer exists." )) return(invisible(FALSE)) } if (is.na(pr$pr_merged_at)) { ui_bullets(c( "i" = "PR {.href [{pr$pr_string}]({pr$pr_html_url})} is unmerged, we will not delete the remote branch {.val {pr_remref}}." )) return(invisible(FALSE)) } ui_bullets(c( "v" = "PR {.href [{pr$pr_string}]({pr$pr_html_url})} has been merged, deleting remote branch {.val {pr_remref}}." )) # TODO: tryCatch here? 
gh( "DELETE /repos/{owner}/{repo}/git/refs/{ref}", ref = glue("heads/{pr$pr_ref}") ) invisible(TRUE) } check_pr_branch <- function(default_branch) { # the glue-ing happens inside check_current_branch(), where `gb` gives the # current git branch check_current_branch( is_not = default_branch, message = c( "i" = "The {.code pr_*()} functions facilitate pull requests.", "i" = "The current branch ({.val {gb}}) is this repo's default branch, but pull requests should NOT come from the default branch.", "i" = "Do you need to call {.fun usethis::pr_init} (new PR)? Or {.fun usethis::pr_resume} or {.fun usethis::pr_fetch} (existing PR)?" ) ) } usethis/R/import-standalone-types-check.R0000644000176200001440000002772214717524721020163 0ustar liggesusers# Standalone file: do not edit by hand # Source: https://github.com/r-lib/rlang/blob/HEAD/R/standalone-types-check.R # Generated by: usethis::use_standalone("r-lib/rlang", "types-check") # ---------------------------------------------------------------------- # # --- # repo: r-lib/rlang # file: standalone-types-check.R # last-updated: 2023-03-13 # license: https://unlicense.org # dependencies: standalone-obj-type.R # imports: rlang (>= 1.1.0) # --- # # ## Changelog # # 2023-03-13: # - Improved error messages of number checkers (@teunbrand) # - Added `allow_infinite` argument to `check_number_whole()` (@mgirlich). # - Added `check_data_frame()` (@mgirlich). # # 2023-03-07: # - Added dependency on rlang (>= 1.1.0). # # 2023-02-15: # - Added `check_logical()`. # # - `check_bool()`, `check_number_whole()`, and # `check_number_decimal()` are now implemented in C. # # - For efficiency, `check_number_whole()` and # `check_number_decimal()` now take a `NULL` default for `min` and # `max`. This makes it possible to bypass unnecessary type-checking # and comparisons in the default case of no bounds checks. # # 2022-10-07: # - `check_number_whole()` and `_decimal()` no longer treat # non-numeric types such as factors or dates as numbers. Numeric # types are detected with `is.numeric()`. # # 2022-10-04: # - Added `check_name()` that forbids the empty string. # `check_string()` allows the empty string by default. # # 2022-09-28: # - Removed `what` arguments. # - Added `allow_na` and `allow_null` arguments. # - Added `allow_decimal` and `allow_infinite` arguments. # - Improved errors with absent arguments. # # # 2022-09-16: # - Unprefixed usage of rlang functions with `rlang::` to # avoid onLoad issues when called from rlang (#1482). # # 2022-08-11: # - Added changelog. 
# # nocov start # Scalars ----------------------------------------------------------------- .standalone_types_check_dot_call <- .Call check_bool <- function(x, ..., allow_na = FALSE, allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x) && .standalone_types_check_dot_call(ffi_standalone_is_bool_1.0.7, x, allow_na, allow_null)) { return(invisible(NULL)) } stop_input_type( x, c("`TRUE`", "`FALSE`"), ..., allow_na = allow_na, allow_null = allow_null, arg = arg, call = call ) } check_string <- function(x, ..., allow_empty = TRUE, allow_na = FALSE, allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { is_string <- .rlang_check_is_string( x, allow_empty = allow_empty, allow_na = allow_na, allow_null = allow_null ) if (is_string) { return(invisible(NULL)) } } stop_input_type( x, "a single string", ..., allow_na = allow_na, allow_null = allow_null, arg = arg, call = call ) } .rlang_check_is_string <- function(x, allow_empty, allow_na, allow_null) { if (is_string(x)) { if (allow_empty || !is_string(x, "")) { return(TRUE) } } if (allow_null && is_null(x)) { return(TRUE) } if (allow_na && (identical(x, NA) || identical(x, na_chr))) { return(TRUE) } FALSE } check_name <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { is_string <- .rlang_check_is_string( x, allow_empty = FALSE, allow_na = FALSE, allow_null = allow_null ) if (is_string) { return(invisible(NULL)) } } stop_input_type( x, "a valid name", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } IS_NUMBER_true <- 0 IS_NUMBER_false <- 1 IS_NUMBER_oob <- 2 check_number_decimal <- function(x, ..., min = NULL, max = NULL, allow_infinite = TRUE, allow_na = FALSE, allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (missing(x)) { exit_code <- IS_NUMBER_false } else if (0 == (exit_code <- .standalone_types_check_dot_call( ffi_standalone_check_number_1.0.7, x, allow_decimal = TRUE, min, max, allow_infinite, allow_na, allow_null ))) { return(invisible(NULL)) } .stop_not_number( x, ..., exit_code = exit_code, allow_decimal = TRUE, min = min, max = max, allow_na = allow_na, allow_null = allow_null, arg = arg, call = call ) } check_number_whole <- function(x, ..., min = NULL, max = NULL, allow_infinite = FALSE, allow_na = FALSE, allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (missing(x)) { exit_code <- IS_NUMBER_false } else if (0 == (exit_code <- .standalone_types_check_dot_call( ffi_standalone_check_number_1.0.7, x, allow_decimal = FALSE, min, max, allow_infinite, allow_na, allow_null ))) { return(invisible(NULL)) } .stop_not_number( x, ..., exit_code = exit_code, allow_decimal = FALSE, min = min, max = max, allow_na = allow_na, allow_null = allow_null, arg = arg, call = call ) } .stop_not_number <- function(x, ..., exit_code, allow_decimal, min, max, allow_na, allow_null, arg, call) { if (allow_decimal) { what <- "a number" } else { what <- "a whole number" } if (exit_code == IS_NUMBER_oob) { min <- min %||% -Inf max <- max %||% Inf if (min > -Inf && max < Inf) { what <- sprintf("%s between %s and %s", what, min, max) } else if (x < min) { what <- sprintf("%s larger than or equal to %s", what, min) } else if (x > max) { what <- sprintf("%s smaller than or equal to %s", what, max) } else { abort("Unexpected state in OOB check", .internal = TRUE) } } stop_input_type( x, what, ..., allow_na = allow_na, allow_null = allow_null, arg = arg, call = call ) } check_symbol <- function(x, ..., 
allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_symbol(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a symbol", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_arg <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_symbol(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "an argument name", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_call <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_call(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a defused call", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_environment <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_environment(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "an environment", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_function <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_function(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a function", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_closure <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_closure(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "an R function", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_formula <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_formula(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a formula", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } # Vectors ----------------------------------------------------------------- check_character <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_character(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a character vector", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_logical <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is_logical(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a logical vector", ..., allow_na = FALSE, allow_null = allow_null, arg = arg, call = call ) } check_data_frame <- function(x, ..., allow_null = FALSE, arg = caller_arg(x), call = caller_env()) { if (!missing(x)) { if (is.data.frame(x)) { return(invisible(NULL)) } if (allow_null && is_null(x)) { return(invisible(NULL)) } } stop_input_type( x, "a data frame", ..., allow_null = allow_null, arg = arg, call = call ) } # nocov end usethis/R/version.R0000644000176200001440000001017614651000165013752 0ustar liggesusers#' Increment package version #' #' @description #' #' usethis 
supports semantic versioning, which is described in more detail in
#' the [version
#' section](https://r-pkgs.org/lifecycle.html#sec-lifecycle-version-number) of [R
#' Packages](https://r-pkgs.org). A version number breaks down like so:
#'
#' ```
#' <major>.<minor>.<patch>       (released version)
#' <major>.<minor>.<patch>.<dev> (dev version)
#' ```
#'
#' `use_version()` increments the "Version" field in `DESCRIPTION`, adds a new
#' heading to `NEWS.md` (if it exists), commits those changes (if the package
#' uses Git), and optionally pushes (if safe to do so). It makes the same update
#' to a line like `PKG_version = "x.y.z";` in `src/version.c` (if it exists).
#'
#' `use_dev_version()` increments to a development version, e.g. from 1.0.0 to
#' 1.0.0.9000. If the existing version is already a development version with
#' four components, it does nothing. Thin wrapper around `use_version()`.
#'
#' @param which A string specifying which level to increment, one of: "major",
#' "minor", "patch", "dev". If `NULL`, user can choose interactively.
#'
#' @seealso The [version
#' section](https://r-pkgs.org/lifecycle.html#sec-lifecycle-version-number) of [R
#' Packages](https://r-pkgs.org).
#'
#' @examples
#' \dontrun{
#' ## for interactive selection, do this:
#' use_version()
#'
#' ## request a specific type of increment
#' use_version("minor")
#' use_dev_version()
#' }
#'
#' @name use_version
NULL

#' @rdname use_version
#' @param push If `TRUE`, also attempts to push the commits to the remote
#' branch.
#' @export
use_version <- function(which = NULL, push = FALSE) {
  if (is.null(which) && !is_interactive()) {
    return(invisible(FALSE))
  }

  check_is_package("use_version()")
  challenge_uncommitted_changes(
    msg = "There are uncommitted changes and you're about to bump version"
  )

  new_ver <- choose_version("What should the new version be?", which)
  if (is.null(new_ver)) {
    return(invisible(FALSE))
  }

  proj_desc_field_update("Version", new_ver, overwrite = TRUE)
  if (names(new_ver) == "dev") {
    use_news_heading("(development version)")
  } else {
    use_news_heading(new_ver)
  }
  use_c_version(new_ver)
  git_ask_commit(
    glue("Increment version number to {new_ver}"),
    untracked = TRUE,
    push = push,
    paths = c("DESCRIPTION", "NEWS.md", path("src", "version.c"))
  )

  invisible(TRUE)
}

#' @rdname use_version
#' @export
use_dev_version <- function(push = FALSE) {
  check_is_package("use_dev_version()")
  if (is_dev_version()) {
    return(invisible())
  }
  use_version(which = "dev", push = push)
}

choose_version <- function(message, which = NULL) {
  versions <- bump_version()
  rtypes <- names(versions)

  which <- which %||% rtypes
  which <- arg_match(which, values = rtypes, multiple = TRUE)
  versions <- versions[which]

  if (length(versions) == 1) {
    return(versions)
  }

  choice <- utils::menu(
    choices = glue(
      "{format(names(versions), justify = 'right')} --> {versions}"
    ),
    title = glue(
      "Current version is {proj_version()}.\n",
      "{message} (0 to exit)"
    )
  )
  if (choice == 0) {
    invisible()
  } else {
    # Not using `[[` even though there is only 1 `choice`,
    # because that removes the names from `versions`
    versions[choice]
  }
}

bump_version <- function(ver = proj_version()) {
  bumps <- c("major", "minor", "patch", "dev")
  vapply(bumps, bump_, character(1), ver = ver)
}

bump_ <- function(x, ver) {
  d <- desc::desc(text = paste0("Version: ", ver))
  suppressMessages(d$bump_version(x)$get("Version")[[1]])
}

use_c_version <- function(ver) {
  version_path <- proj_path("src", "version.c")
  if (!file_exists(version_path)) {
    return()
  }
  hint <- glue("{project_name()}_version")
  ui_bullets(c(
    "v" = "Setting {.field {hint}} to {.val {ver}} in
{.path {pth(version_path)}}." )) lines <- read_utf8(version_path) re <- glue("(^.*{project_name()}_version = \")([0-9.]+)(\";$)") lines <- gsub(re, glue("\\1{ver}\\3"), lines) write_utf8(version_path, lines) } is_dev_version <- function(version = proj_version()) { ver <- package_version(version) length(unlist(ver)) > 3 } usethis/R/use_import_from.R0000644000176200001440000000432414651514262015504 0ustar liggesusers#' Import a function from another package #' #' @description #' `use_import_from()` imports a function from another package by adding the #' roxygen2 `@importFrom` tag to the package-level documentation (which can be #' created with [`use_package_doc()`]). Importing a function from another #' package allows you to refer to it without a namespace (e.g., `fun()` instead #' of `package::fun()`). #' #' `use_import_from()` also re-documents the NAMESPACE, and re-load the current #' package. This ensures that `fun` is immediately available in your development #' session. #' #' @param package Package name #' @param fun A vector of function names #' @param load Logical. Re-load with [`pkgload::load_all()`]? #' @return #' Invisibly, `TRUE` if the package document has changed, `FALSE` if not. #' @export #' @examples #' \dontrun{ #' use_import_from("glue", "glue") #' } use_import_from <- function(package, fun, load = is_interactive()) { if (!is_string(package)) { ui_abort("{.arg package} must be a single string.") } check_is_package("use_import_from()") check_uses_roxygen("use_import_from()") check_installed(package) check_has_package_doc("use_import_from()") check_functions_exist(package, fun) use_dependency(package, "Imports") changed <- roxygen_ns_append(glue("@importFrom {package} {fun}")) if (changed) { roxygen_update_ns(load) } invisible(changed) } check_functions_exist <- function(package, fun) { purrr::walk2(package, fun, check_fun_exists) } check_fun_exists <- function(package, fun) { if (exists(fun, envir = asNamespace(package))) { return() } name <- paste0(package, "::", fun) ui_abort("Can't find {.fun {name}}.") } check_has_package_doc <- function(whos_asking) { if (has_package_doc()) { return(invisible(TRUE)) } whos_asking_fn <- sub("()", "", whos_asking, fixed = TRUE) msg <- c( "!" = "{.fun {whos_asking_fn}} requires package-level documentation.", " " = "Would you like to add it now?" ) if (is_interactive() && ui_yep(msg)) { use_package_doc() } else { ui_abort(c( "{.fun {whos_asking_fn}} requires package-level documentation.", "You can add it by running {.run usethis::use_package_doc()}." )) } invisible(TRUE) } usethis/R/pipe.R0000644000176200001440000000203214717524721013226 0ustar liggesusers#' Use magrittr's pipe in your package #' #' Does setup necessary to use magrittr's pipe operator, `%>%` in your package. #' This function requires the use of \pkg{roxygen2}. #' * Adds magrittr to "Imports" in `DESCRIPTION`. #' * Imports the pipe operator specifically, which is necessary for internal #' use. #' * Exports the pipe operator, if `export = TRUE`, which is necessary to make #' `%>%` available to the users of your package. #' #' @param export If `TRUE`, the file `R/utils-pipe.R` is added, which provides #' the roxygen template to import and re-export `%>%`. If `FALSE`, the necessary #' roxygen directive is added, if possible, or otherwise instructions are given. 
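#' For example, `use_pipe(export = FALSE)` is the right call for a package that
#' uses `%>%` internally but does not want to re-export it for its users.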
#' #' @export #' #' @examples #' \dontrun{ #' use_pipe() #' } use_pipe <- function(export = TRUE) { check_is_package("use_pipe()") check_uses_roxygen("use_pipe()") if (export) { use_dependency("magrittr", "Imports") use_template("pipe.R", "R/utils-pipe.R") && roxygen_remind() return(invisible(TRUE)) } use_import_from("magrittr", "%>%") } usethis/R/utils-glue.R0000644000176200001440000000036414651000165014355 0ustar liggesusers# wrappers that apply as.character() to glue functions glue_chr <- function(...) { as.character(glue(..., .envir = parent.frame(1))) } glue_data_chr <- function(.x, ...) { as.character(glue_data(.x = .x, ..., .envir = parent.frame(1))) } usethis/R/tibble.R0000644000176200001440000000301614651000165013521 0ustar liggesusers#' Prepare to return a tibble #' #' @description #' #' `r lifecycle::badge("questioning")` #' #' Does minimum setup such that a tibble returned by your package #' is handled using the tibble method for generics like `print()` or \code{[}. #' Presumably you care about this if you've chosen to store and expose an #' object with class `tbl_df`. Specifically: #' * Check that the active package uses roxygen2 #' * Add the tibble package to "Imports" in `DESCRIPTION` #' * Prepare the roxygen directive necessary to import at least one function #' from tibble: #' - If possible, the directive is inserted into existing package-level #' documentation, i.e. the roxygen snippet created by [use_package_doc()] #' - Otherwise, we issue advice on where the user should add the directive #' #' This is necessary when your package returns a stored data object that has #' class `tbl_df`, but the package code does not make direct use of functions #' from the tibble package. If you do nothing, the tibble namespace is not #' necessarily loaded and your tibble may therefore be printed and subsetted #' like a base `data.frame`. #' #' @export #' @examples #' \dontrun{ #' use_tibble() #' } use_tibble <- function() { check_is_package("use_tibble()") check_uses_roxygen("use_tibble()") created <- use_import_from("tibble", "tibble") ui_bullets(c("_" = "Document a returned tibble like so:")) ui_code_snippet( "#' @return a [tibble][tibble::tibble-package]", language = "", copy = FALSE ) invisible(created) } usethis/R/utils-git.R0000644000176200001440000003165114717524762014230 0ustar liggesusers# gert ------------------------------------------------------------------------- gert_shush <- function(expr, regexp) { check_character(regexp) withCallingHandlers( gertMessage = function(cnd) { m <- map_lgl(regexp, ~ grepl(.x, cnd_message(cnd), perl = TRUE)) if (any(m)) { cnd_muffle(cnd) } }, expr ) } # Repository ------------------------------------------------------------------- git_repo <- function() { check_uses_git() proj_get() } uses_git <- function() { repo <- tryCatch( gert::git_find(proj_get()), error = function(e) NULL ) !is.null(repo) } check_uses_git <- function() { if (uses_git()) { return(invisible()) } ui_abort(c( "Cannot detect that project is already a Git repository.", "Do you need to run {.run usethis::use_git()}?" )) } git_init <- function() { gert::git_init(proj_get()) } # Config ----------------------------------------------------------------------- # `where = "de_facto"` means look at the values that are "in force", i.e. where # local repo variables override global user-level variables, when both are # defined # # `where = "local"` is strict, i.e. 
it only returns a value that is in the local # config git_cfg_get <- function(name, where = c("de_facto", "local", "global")) { where <- match.arg(where) if (where == "de_facto") { return(git_cfg_get(name, "local") %||% git_cfg_get(name, "global")) } if (where == "global" || !uses_git()) { dat <- gert::git_config_global() } else { dat <- gert::git_config(repo = git_repo()) } if (where == "local") { dat <- dat[dat$level == "local", ] } out <- dat$value[tolower(dat$name) == tolower(name)] if (length(out) > 0) out else NULL } # more-specific case for user-name and -email git_user_get <- function(where = c("de_facto", "local", "global")) { where <- match.arg(where) list( name = git_cfg_get("user.name", where), email = git_cfg_get("user.email", where) ) } # translate from "usethis" terminology to "git" terminology where_from_scope <- function(scope = c("user", "project")) { scope <- match.arg(scope) where_scope <- c(user = "global", project = "de_facto") where_scope[scope] } # ensures that core.excludesFile is configured # if configured, leave well enough alone # if not, check for existence of one of the Usual Suspects; if found, configure # otherwise, configure as path_home(".gitignore") ensure_core_excludesFile <- function() { path <- git_ignore_path(scope = "user") if (!is.null(path)) { return(invisible()) } # .gitignore is most common, but .gitignore_global appears in prominent # places --> so we allow the latter, but prefer the former path <- path_first_existing(path_home(c(".gitignore", ".gitignore_global"))) %||% path_home(".gitignore") if (!is_windows()) { # express path relative to user's home directory, except on Windows path <- path("~", path_rel(path, path_home())) } ui_bullets(c( "v" = "Configuring {.field core.excludesFile}: {.path {pth(path)}}" )) gert::git_config_global_set("core.excludesFile", path) invisible() } # Status------------------------------------------------------------------------ git_status <- function(untracked) { check_bool(untracked) st <- gert::git_status(repo = git_repo()) if (!untracked) { st <- st[st$status != "new", ] } st } # Commit ----------------------------------------------------------------------- git_ask_commit <- function(message, untracked, push = FALSE, paths = NULL) { if (!is_interactive() || !uses_git()) { return(invisible()) } # this is defined here to encourage all commits to route through this function git_commit <- function(paths, message) { repo <- git_repo() ui_bullets(c("v" = "Adding files.")) gert::git_add(paths, repo = repo) ui_bullets(c("v" = "Making a commit with message {.val {message}}.")) gert::git_commit(message, repo = repo) } uncommitted <- git_status(untracked)$file if (is.null(paths)) { paths <- uncommitted } else { paths <- intersect(paths, uncommitted) } n <- length(paths) if (n == 0) { return(invisible()) } paths <- sort(paths) ui_paths <- usethis_map_cli(paths, template = '{.path {pth("<>")}}') file_hint <- "{cli::qty(n)}There {?is/are} {n} uncommitted file{?s}:" ui_bullets(c( "i" = file_hint, bulletize(ui_paths, n_show = 10) )) # Only push if no remote & a single change push <- push && git_can_push(max_local = 1) if (ui_yep(c( "!" = "Is it ok to commit {if (push) 'and push '} {cli::qty(n)} {?it/them}?" 
))) { git_commit(paths, message) if (push) { git_push() } } invisible() } git_uncommitted <- function(untracked = FALSE) { nrow(git_status(untracked)) > 0 } challenge_uncommitted_changes <- function(untracked = FALSE, msg = NULL) { if (!uses_git()) { return(invisible()) } if (rstudioapi::hasFun("documentSaveAll")) { rstudioapi::documentSaveAll() } default_msg <- " There are uncommitted changes, which may cause problems or be lost when \\ we push, pull, switch, or compare branches" msg <- glue(msg %||% default_msg) if (git_uncommitted(untracked = untracked)) { if (ui_yep(c( "!" = msg, " " = "Do you want to proceed anyway?" ))) { return(invisible()) } else { ui_abort("Uncommitted changes. Please commit before continuing.") } } } git_conflict_report <- function() { st <- git_status(untracked = FALSE) conflicted <- st$file[st$status == "conflicted"] n <- length(conflicted) if (n == 0) { return(invisible()) } conflicted_paths <- usethis_map_cli( conflicted, template = '{.path {pth("<>")}}' ) file_hint <- "{cli::qty(n)}There {?is/are} {n} conflicted file{?s}:" ui_bullets(c( "i" = file_hint, bulletize(conflicted_paths, n_show = 10) )) yes <- "Yes, open the conflicted files for editing." yes_soft <- "Yes, but do not open the conflicted files." no <- "No, I want to abort this merge." choice <- utils::menu( title = "Do you want to proceed with this merge?", choices = c(yes, yes_soft, no) ) if (choice < 1 || choice > 2) { gert::git_merge_abort(repo = git_repo()) ui_abort("Abandoning the merge, since it will cause merge conflicts.") } if (choice == 1) { ui_silence(purrr::walk(conflicted, edit_file)) } ui_abort(c( "Please fix each conflict, save, stage, and commit.", "To back out of this merge, run {.code gert::git_merge_abort()} (in R) or {.code git merge --abort} (in the shell)." )) } # Remotes ---------------------------------------------------------------------- ## remref --> remote, branch git_parse_remref <- function(remref) { regex <- paste0("^", names(git_remotes()), collapse = "|") regex <- glue("({regex})/(.*)") list(remote = sub(regex, "\\1", remref), branch = sub(regex, "\\2", remref)) } remref_remote <- function(remref) git_parse_remref(remref)$remote remref_branch <- function(remref) git_parse_remref(remref)$branch # Pull ------------------------------------------------------------------------- # Pull from remref or upstream tracking. If neither given/exists, do nothing. # Therefore, this does less than `git pull`. 
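# Illustration only (not called anywhere else in the package): a small sketch
# of how the remref helpers above feed into git_pull() below. A "remref" such
# as "origin/main" is split into its remote and branch components; when no
# remref is supplied, git_pull() falls back to the current branch's upstream
# tracking branch and quietly does nothing if there is no tracking branch
# either.
demo_remref_parts <- function(remref = "origin/main") {
  # e.g. list(remote = "origin", branch = "main")
  list(remote = remref_remote(remref), branch = remref_branch(remref))
}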
git_pull <- function(remref = NULL, verbose = TRUE) { check_string(remref, allow_na = TRUE, allow_null = TRUE) repo <- git_repo() branch <- git_branch() remref <- remref %||% git_branch_tracking(branch) if (is.na(remref)) { if (verbose) { ui_bullets(c("v" = "No remote branch to pull from for {.val {branch}}.")) } return(invisible()) } if (verbose) { ui_bullets(c("v" = "Pulling from {.val {remref}}.")) } gert::git_fetch( remote = remref_remote(remref), refspec = remref_branch(remref), repo = repo, verbose = FALSE ) # this is pretty brittle, because I've hard-wired these messages # https://github.com/r-lib/gert/blob/main/R/merge.R # but at time of writing, git_merge() offers no verbosity control gert_shush( regexp = c( "Already up to date, nothing to merge", "Performing fast-forward merge, no commit needed" ), gert::git_merge(remref, repo = repo) ) st <- git_status(untracked = TRUE) if (any(st$status == "conflicted")) { git_conflict_report() } invisible() } # Branch ------------------------------------------------------------------ git_branch <- function() { info <- gert::git_info(repo = git_repo()) branch <- info$shorthand if (identical(branch, "HEAD")) { ui_abort("Detached head; can't continue.") } if (is.na(branch)) { ui_abort("On an unborn branch -- do you need to make an initial commit?") } branch } git_branch_tracking <- function(branch = git_branch()) { repo <- git_repo() if (!gert::git_branch_exists(branch, local = TRUE, repo = repo)) { ui_abort("There is no local branch named {.val {branch}}.") } gbl <- gert::git_branch_list(local = TRUE, repo = repo) sub("^refs/remotes/", "", gbl$upstream[gbl$name == branch]) } git_branch_compare <- function(branch = git_branch(), remref = NULL) { remref <- remref %||% git_branch_tracking(branch) gert::git_fetch( remote = remref_remote(remref), refspec = remref_branch(remref), repo = git_repo(), verbose = FALSE ) out <- gert::git_ahead_behind(upstream = remref, ref = branch, repo = git_repo()) list(local_only = out$ahead, remote_only = out$behind) } git_can_push <- function(max_local = Inf, branch = git_branch(), remref = NULL) { remref <- remref %||% git_branch_tracking(branch) if (is.null(remref)) { return(FALSE) } comp <- git_branch_compare(branch, remref) comp$remote_only == 0 && comp$local_only <= max_local } git_push <- function(branch = git_branch(), remref = NULL, verbose = TRUE) { remref <- remref %||% git_branch_tracking(branch) if (verbose) { ui_bullets(c( "v" = "Pushing local {.val {branch}} branch to {.val {remref}}." )) } gert::git_push( remote = remref_remote(remref), refspec = glue("refs/heads/{branch}:refs/heads/{remref_branch(remref)}"), verbose = FALSE, repo = git_repo() ) } git_push_first <- function(branch = git_branch(), remote = "origin", verbose = TRUE) { if (verbose) { remref <- glue("{remote}/{branch}") ui_bullets(c( "v" = "Pushing {.val {branch}} branch to GitHub and setting {.val {remref}} as upstream branch." 
)) } gert::git_push( remote = remote, set_upstream = TRUE, verbose = FALSE, repo = git_repo() ) } # Checks ------------------------------------------------------------------ check_current_branch <- function(is = NULL, is_not = NULL, message = NULL) { gb <- git_branch() if (!is.null(is)) { check_string(is) if (gb == is) { return(invisible()) } else { if (is.null(message)) { message <- c("x" = "Must be on branch {.val {is}}, not {.val {gb}}.") } ui_abort(message) } } if (!is.null(is_not)) { check_string(is_not) if (gb != is_not) { return(invisible()) } else { if (is.null(message)) { message <- c("x" = "Can't be on branch {.val {gb}}.") } ui_abort(message) } } invisible() } # examples of remref: upstream/main, origin/foofy check_branch_up_to_date <- function(direction = c("pull", "push"), remref = NULL, use = NULL) { direction <- match.arg(direction) branch <- git_branch() remref <- remref %||% git_branch_tracking(branch) use <- use %||% switch(direction, pull = "git pull", push = "git push") if (is.na(remref)) { ui_bullets(c( "i" = "Local branch {.val {branch}} is not tracking a remote branch." )) return(invisible()) } if (direction == "pull") { ui_bullets(c( "v" = "Checking that local branch {.val {branch}} has the changes in {.val {remref}}." )) } else { ui_bullets(c( "v" = "Checking that remote branch {.val {remref}} has the changes in {.val {branch}}." )) } comparison <- git_branch_compare(branch, remref) if (direction == "pull") { if (comparison$remote_only == 0) { return(invisible()) } else { ui_abort(c( "Local branch {.val {branch}} is behind {.val {remref}} by {comparison$remote_only} commit{?s}.", "Please use {.code {use}} to update." )) } } else { if (comparison$local_only == 0) { return(invisible()) } else { # TODO: consider offering to push for them? ui_abort(c( "Local branch {.val {branch}} is ahead of {.val {remref}} by {comparison$remote_only} commit{?s}.", "Please use {.code {use}} to update." )) } } } check_branch_pulled <- function(remref = NULL, use = NULL) { check_branch_up_to_date(direction = "pull", remref = remref, use = use) } check_branch_pushed <- function(remref = NULL, use = NULL) { check_branch_up_to_date(direction = "push", remref = remref, use = use) } usethis/R/utils-gh.R0000644000176200001440000000310614651000165014014 0ustar liggesusers# Functions that are in a grey area between usethis and gh gh_tr <- function(tr) { force(tr) function(endpoint, ...) 
{ gh::gh( endpoint, ..., owner = tr$repo_owner, repo = tr$repo_name, .api_url = tr$api_url ) } } # Functions inlined from gh ---- get_baseurl <- function(url) { # https://github.uni.edu/api/v3/ if (!any(grepl("^https?://", url))) { stop("Only works with HTTP(S) protocols") } prot <- sub("^(https?://).*$", "\\1", url) # https:// rest <- sub("^https?://(.*)$", "\\1", url) # github.uni.edu/api/v3/ host <- sub("/.*$", "", rest) # github.uni.edu paste0(prot, host) # https://github.uni.edu } # https://api.github.com --> https://github.com # api.github.com --> github.com normalize_host <- function(x) { sub("api[.]github[.]com", "github.com", x) } get_hosturl <- function(url) { url <- get_baseurl(url) normalize_host(url) } # (almost) the inverse of get_hosturl() # https://github.com --> https://api.github.com # https://github.uni.edu --> https://github.uni.edu/api/v3 get_apiurl <- function(url) { host_url <- get_hosturl(url) prot_host <- strsplit(host_url, "://", fixed = TRUE)[[1]] if (is_github_dot_com(host_url)) { paste0(prot_host[[1]], "://api.github.com") } else { paste0(host_url, "/api/v3") } } is_github_dot_com <- function(url) { url <- get_baseurl(url) url <- normalize_host(url) grepl("^https?://github.com", url) } default_api_url <- function() { Sys.getenv("GITHUB_API_URL", unset = "https://api.github.com") } usethis/R/coverage.R0000644000176200001440000000316514717524721014074 0ustar liggesusers#' Test coverage #' #' Adds test coverage reporting to a package, using either Codecov #' (`https://codecov.io`) or Coveralls (`https://coveralls.io`). #' #' @param type Which web service to use. #' @eval param_repo_spec() #' @export use_coverage <- function(type = c("codecov", "coveralls"), repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() type <- match.arg(type) if (type == "codecov") { new <- use_template("codecov.yml", ignore = TRUE) if (!new) { return(invisible(FALSE)) } } else if (type == "coveralls") { ui_bullets(c( "_" = "Turn on coveralls for this repo at {.url https://coveralls.io/repos/new}." )) } switch( type, codecov = use_codecov_badge(repo_spec), coveralls = use_coveralls_badge(repo_spec) ) ui_bullets(c( "_" = "Call {.code use_github_action(\"test-coverage\")} to continuously monitor test coverage." )) invisible(TRUE) } #' @export #' @rdname use_coverage #' @param files Character vector of file globs. use_covr_ignore <- function(files) { use_build_ignore(".covrignore") write_union(proj_path(".covrignore"), files) } use_codecov_badge <- function(repo_spec) { url <- glue("https://app.codecov.io/gh/{repo_spec}") img <- glue("https://codecov.io/gh/{repo_spec}/graph/badge.svg") use_badge("Codecov test coverage", url, img) } use_coveralls_badge <- function(repo_spec) { default_branch <- git_default_branch() url <- glue("https://coveralls.io/r/{repo_spec}?branch={default_branch}") img <- glue("https://coveralls.io/repos/github/{repo_spec}/badge.svg") use_badge("Coveralls test coverage", url, img) } usethis/R/tidyverse.R0000644000176200001440000003174014717524762014324 0ustar liggesusers#' Helpers for tidyverse development #' #' These helpers follow tidyverse conventions which are generally a little #' stricter than the defaults, reflecting the need for greater rigor in #' commonly used packages. #' #' @details #' #' * `create_tidy_package()`: creates a new package, immediately applies as many #' of the tidyverse conventions as possible, issues a few reminders, and #' activates the new package. 
#' #' * `use_tidy_dependencies()`: sets up standard dependencies used by all #' tidyverse packages (except packages that are designed to be dependency free). #' #' * `use_tidy_description()`: puts fields in standard order and alphabetises #' dependencies. #' #' * `use_tidy_eval()`: imports a standard set of helpers to facilitate #' programming with the tidy eval toolkit. #' #' * `use_tidy_style()`: styles source code according to the [tidyverse style #' guide](https://style.tidyverse.org). This function will overwrite files! See #' below for usage advice. #' #' * `use_tidy_contributing()`: adds standard tidyverse contributing guidelines. #' #' * `use_tidy_issue_template()`: adds a standard tidyverse issue template. #' #' * `use_tidy_release_test_env()`: updates the test environment section in #' `cran-comments.md`. #' #' * `use_tidy_support()`: adds a standard description of support resources for #' the tidyverse. #' #' * `use_tidy_coc()`: equivalent to `use_code_of_conduct()`, but puts the #' document in a `.github/` subdirectory. #' #' * `use_tidy_github()`: convenience wrapper that calls #' `use_tidy_contributing()`, `use_tidy_issue_template()`, `use_tidy_support()`, #' `use_tidy_coc()`. #' #' * [use_tidy_github_labels()] calls `use_github_labels()` to implement #' tidyverse conventions around GitHub issue label names and colours. #' #' * `use_tidy_upkeep_issue()` creates an issue containing a checklist of #' actions to bring your package up to current tidyverse standards. Also #' records the current date in the `Config/usethis/last-upkeep` field in #' `DESCRIPTION`. #' #' * `use_tidy_logo()` calls `use_logo()` on the appropriate hex sticker PNG #' file at . #' #' @section `use_tidy_style()`: #' Uses the [styler package](https://styler.r-lib.org) package to style all code #' in a package, project, or directory, according to the [tidyverse style #' guide](https://style.tidyverse.org). #' #' **Warning:** This function will overwrite files! It is strongly suggested to #' only style files that are under version control or to first create a backup #' copy. #' #' Invisibly returns a data frame with one row per file, that indicates whether #' styling caused a change. #' #' @param strict Boolean indicating whether or not a strict version of styling #' should be applied. See [styler::tidyverse_style()] for details. 
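#'
#' @examples
#' \dontrun{
#' # A minimal sketch; the path is purely illustrative
#' create_tidy_package("~/rrr/mynewpackage")
#'
#' # In an existing package: reorder DESCRIPTION fields and restyle the code
#' # (use_tidy_style() overwrites files, so commit first)
#' use_tidy_description()
#' use_tidy_style()
#' }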
#' #' @name tidyverse NULL #' @export #' @rdname tidyverse #' @inheritParams create_package #' @inheritParams licenses create_tidy_package <- function(path, copyright_holder = NULL) { path <- create_package(path, rstudio = TRUE, open = FALSE) local_project(path) use_testthat() use_mit_license(copyright_holder) use_tidy_description() use_readme_rmd(open = FALSE) use_lifecycle_badge("experimental") use_cran_badge() use_cran_comments(open = FALSE) ui_bullets(c("i" = "In the new package, remember to do:")) ui_code_snippet(" usethis::use_git() usethis::use_github() usethis::use_tidy_github() usethis::use_tidy_github_actions() usethis::use_tidy_github_labels() usethis::use_pkgdown_github_pages() ") proj_activate(path) } #' @export #' @rdname tidyverse use_tidy_description <- function() { desc <- proj_desc() tidy_desc(desc) desc$write() invisible(TRUE) } #' @export #' @rdname tidyverse use_tidy_dependencies <- function() { check_has_package_doc("use_tidy_dependencies()") use_dependency("rlang", "Imports") use_dependency("lifecycle", "Imports") use_dependency("cli", "Imports") use_dependency("glue", "Imports") use_dependency("withr", "Imports") # standard imports imports <- any( roxygen_ns_append("@import rlang"), roxygen_ns_append("@importFrom glue glue"), roxygen_ns_append("@importFrom lifecycle deprecated") ) if (imports) { roxygen_update_ns() } # add badges; we don't need the details ui_silence(use_lifecycle()) # If needed, copy in lightweight purrr compatibility layer if (!proj_desc()$has_dep("purrr")) { use_directory("R") use_standalone("r-lib/rlang", "purrr") } invisible() } #' @export #' @rdname tidyverse use_tidy_contributing <- function() { use_dot_github() data <- list( Package = project_name(), github_spec = target_repo_spec(ask = FALSE) ) use_template( "tidy-contributing.md", path(".github", "CONTRIBUTING.md"), data = data ) } #' @export #' @rdname tidyverse use_tidy_support <- function() { use_dot_github() data <- list( Package = project_name(), github_spec = target_repo_spec(ask = FALSE) ) use_template( "tidy-support.md", path(".github", "SUPPORT.md"), data = data ) } #' @export #' @rdname tidyverse use_tidy_issue_template <- function() { use_dot_github() use_directory(path(".github", "ISSUE_TEMPLATE")) use_template( "tidy-issue.md", path(".github", "ISSUE_TEMPLATE", "issue_template.md") ) } #' @export #' @rdname tidyverse use_tidy_coc <- function() { old_top_level_coc <- proj_path(c("CODE_OF_CONDUCT.md", "CONDUCT.md")) old <- file_exists(old_top_level_coc) if (any(old)) { file_delete(old_top_level_coc[old]) } use_dot_github() use_coc(contact = "codeofconduct@posit.co", path = ".github") } #' @export #' @rdname tidyverse use_tidy_github <- function() { use_dot_github() use_tidy_contributing() use_tidy_issue_template() use_tidy_support() use_tidy_coc() } use_dot_github <- function(ignore = TRUE) { use_directory(".github", ignore = ignore) use_git_ignore("*.html", directory = ".github") } #' @export #' @rdname tidyverse use_tidy_style <- function(strict = TRUE) { check_installed("styler") challenge_uncommitted_changes(msg = " There are uncommitted changes and it is highly recommended to get into a \\ clean Git state before restyling your project's code") if (is_package()) { styled <- styler::style_pkg( proj_get(), style = styler::tidyverse_style, strict = strict ) } else { styled <- styler::style_dir( proj_get(), style = styler::tidyverse_style, strict = strict ) } ui_bullets(c( " " = "", "v" = "Styled project according to the tidyverse style guide." 
)) invisible(styled) } #' Identify contributors via GitHub activity #' #' Derives a list of GitHub usernames, based on who has opened issues or pull #' requests. Used to populate the acknowledgment section of package release blog #' posts at . If no arguments are given, we #' retrieve all contributors to the active project since its last (GitHub) #' release. Unexported helper functions, `releases()` and `ref_df()` can be #' useful interactively to get a quick look at release tag names and a data #' frame about refs (defaulting to releases), respectively. #' #' @param repo_spec Optional GitHub repo specification in any form accepted for #' the `repo_spec` argument of [create_from_github()] (plain spec or a browser #' or Git URL). A URL specification is the only way to target a GitHub host #' other than `"github.com"`, which is the default. #' @param from,to GitHub ref (i.e., a SHA, tag, or release) or a timestamp in #' ISO 8601 format, specifying the start or end of the interval of interest, #' in the sense of `[from, to]`. Examples: "08a560d", "v1.3.0", #' "2018-02-24T00:13:45Z", "2018-05-01". When `from = NULL, to = NULL`, we set #' `from` to the timestamp of the most recent (GitHub) release. Otherwise, #' `NULL` means "no bound". #' #' @return A character vector of GitHub usernames, invisibly. #' @export #' #' @examples #' \dontrun{ #' # active project, interval = since the last release #' use_tidy_thanks() #' #' # active project, interval = since a specific datetime #' use_tidy_thanks(from = "2020-07-24T00:13:45Z") #' #' # r-lib/usethis, interval = since a certain date #' use_tidy_thanks("r-lib/usethis", from = "2020-08-01") #' #' # r-lib/usethis, up to a specific release #' use_tidy_thanks("r-lib/usethis", from = NULL, to = "v1.1.0") #' #' # r-lib/usethis, since a specific commit, up to a specific date #' use_tidy_thanks("r-lib/usethis", from = "08a560d", to = "2018-05-14") #' #' # r-lib/usethis, but with copy/paste of a browser URL #' use_tidy_thanks("https://github.com/r-lib/usethis") #' } use_tidy_thanks <- function(repo_spec = NULL, from = NULL, to = NULL) { repo_spec <- repo_spec %||% target_repo_spec() parsed_repo_spec <- parse_repo_url(repo_spec) repo_spec <- parsed_repo_spec$repo_spec # this is the most practical way to propagate `host` to downstream helpers if (!is.null(parsed_repo_spec$host)) { withr::local_envvar(c(GITHUB_API_URL = parsed_repo_spec$host)) } if (is.null(to)) { from <- from %||% releases(repo_spec)[[1]] } from_timestamp <- as_timestamp(repo_spec, x = from) %||% "2008-01-01" to_timestamp <- as_timestamp(repo_spec, x = to) ui_bullets(c( "i" = "Looking for contributors from {as.Date(from_timestamp)} to {to_timestamp %||% 'now'}." 
)) res <- gh::gh( "/repos/{owner}/{repo}/issues", owner = spec_owner(repo_spec), repo = spec_repo(repo_spec), since = from_timestamp, state = "all", filter = "all", .limit = Inf ) if (length(res) < 1) { ui_bullets(c("x" = "No matching issues/PRs found.")) return(invisible()) } creation_time <- function(x) { as.POSIXct(map_chr(x, "created_at")) } res <- res[creation_time(res) >= as.POSIXct(from_timestamp)] if (!is.null(to_timestamp)) { res <- res[creation_time(res) <= as.POSIXct(to_timestamp)] } if (length(res) == 0) { ui_bullets(c("x" = "No matching issues/PRs found.")) return(invisible()) } contributors <- sort(unique(map_chr(res, c("user", "login")))) contrib_link <- glue("[@{contributors}](https://github.com/{contributors})") ui_bullets(c("v" = "Found {length(contributors)} contributors:")) ui_code_snippet( glue_collapse(contrib_link, sep = ", ", last = ", and ") + glue("."), language = "" ) invisible(contributors) } ## if x appears to be a timestamp, pass it through ## otherwise, assume it's a ref and look up its timestamp as_timestamp <- function(repo_spec, x = NULL) { if (is.null(x)) { return(NULL) } as_POSIXct <- try(as.POSIXct(x), silent = TRUE) if (inherits(as_POSIXct, "POSIXct")) { return(x) } ui_bullets(c("v" = "Resolving timestamp for ref {.val {x}}.")) ref_df(repo_spec, refs = x)$timestamp } ## returns a data frame on GitHub refs, defaulting to all releases ref_df <- function(repo_spec, refs = NULL) { check_name(repo_spec) check_character(refs, allow_null = TRUE) refs <- refs %||% releases(repo_spec) if (is.null(refs)) { return(NULL) } get_thing <- function(thing) { gh::gh( "/repos/{owner}/{repo}/commits/{thing}", owner = spec_owner(repo_spec), repo = spec_repo(repo_spec), thing = thing ) } res <- lapply(refs, get_thing) data.frame( ref = refs, sha = substr(map_chr(res, "sha"), 1, 7), timestamp = map_chr(res, c("commit", "committer", "date")), stringsAsFactors = FALSE ) } ## returns character vector of release tag names releases <- function(repo_spec) { check_name(repo_spec) res <- gh::gh( "/repos/{owner}/{repo}/releases", owner = spec_owner(repo_spec), repo = spec_repo(repo_spec) ) if (length(res) < 1) { return(NULL) } map_chr(res, "tag_name") } ## approaches based on available.packages() and/or installed.packages() present ## several edge cases, requirements, and gotchas ## for this application, hard-wiring seems to be "good enough" base_and_recommended <- function() { # base_pkgs <- as.vector(installed.packages(priority = "base")[, "Package"]) # av <- available.packages() # keep <- av[ , "Priority", drop = TRUE] %in% "recommended" # rec_pkgs <- unname(av[keep, "Package", drop = TRUE]) # dput(sort(unique(c(base_pkgs, rec_pkgs)))) c( "base", "boot", "class", "cluster", "codetools", "compiler", "datasets", "foreign", "graphics", "grDevices", "grid", "KernSmooth", "lattice", "MASS", "Matrix", "methods", "mgcv", "nlme", "nnet", "parallel", "rpart", "spatial", "splines", "stats", "stats4", "survival", "tcltk", "tools", "utils" ) } #' @rdname tidyverse #' @inheritParams use_logo #' @export use_tidy_logo <- function(geometry = "240x278", retina = TRUE) { if (!is_posit_pkg()) { ui_abort("This function only works for Posit packages.") } tf <- withr::local_tempfile(fileext = ".png") gh::gh( "/repos/rstudio/hex-stickers/contents/PNG/{pkg}.png/", pkg = project_name(), .destfile = tf, .accept = "application/vnd.github.v3.raw" ) use_logo(tf, geometry = geometry, retina = retina) } usethis/R/utils-github.R0000644000176200001440000010453614717524762014732 0ustar liggesusers# OWNER/REPO --> 
OWNER, REPO parse_repo_spec <- function(repo_spec) { repo_split <- strsplit(repo_spec, "/")[[1]] if (length(repo_split) != 2) { ui_abort("{.arg repo_spec} must be of the form {.val owner/repo}.") } list(owner = repo_split[[1]], repo = repo_split[[2]]) } spec_owner <- function(repo_spec) parse_repo_spec(repo_spec)$owner spec_repo <- function(repo_spec) parse_repo_spec(repo_spec)$repo # OWNER, REPO --> OWNER/REPO make_spec <- function(owner = NA, repo = NA) { no_spec <- is.na(owner) | is.na(repo) as.character(ifelse(no_spec, NA, glue("{owner}/{repo}"))) } # named vector or list of GitHub URLs --> data frame of URL parts # more general than the name suggests # definitely designed for GitHub URLs but not overtly GitHub-specific # https://stackoverflow.com/questions/2514859/regular-expression-for-git-repository # https://git-scm.com/docs/git-clone#_git_urls # https://stackoverflow.com/questions/27745/getting-parts-of-a-url-regex github_remote_regex <- paste0( "^", "(?\\w+://)?", "(?.+@)?", "(?[^/:]+)", "[/:]", "(?[^/]+)", "/", "(?[^/#]+)", "(?.*)", "$" ) parse_github_remotes <- function(x) { # https://github.com/r-lib/usethis # --> https, github.com, rlib, usethis # https://github.com/r-lib/usethis.git # --> https, github.com, rlib, usethis # https://github.com/r-lib/usethis#readme # --> https, github.com, rlib, usethis # https://github.com/r-lib/usethis/issues/1169 # --> https, github.com, rlib, usethis # https://github.acme.com/r-lib/devtools.git # --> https, github.acme.com, rlib, usethis # git@github.com:r-lib/usethis.git # --> ssh, github.com, rlib, usethis # ssh://git@github.com/rstudio/packrat.git # --> ssh, github.com, rlib, usethis dat <- re_match(x, github_remote_regex) dat$protocol <- sub("://$", "", dat$protocol) dat$user <- sub("@$", "", dat$user) dat$repo_name <- sub("[.]git$", "", dat$repo_name) dat$url <- dat$.text # as.character() necessary for edge case of length-0 input dat$protocol <- as.character(ifelse(dat$protocol == "https", "https", "ssh")) dat$name <- if (rlang::is_named(x)) { names(x) } else { rep_len(NA_character_, length.out = nrow(dat)) } dat[c("name", "url", "host", "repo_owner", "repo_name", "protocol")] } parse_repo_url <- function(x) { check_name(x) dat <- re_match(x, github_remote_regex) if (is.na(dat$.match)) { list(repo_spec = x, host = NULL) } else { dat <- parse_github_remotes(x) # TODO: generalize here for GHE hosts that don't include 'github' if (!grepl("github", dat$host)) { ui_abort("URL doesn't seem to be associated with GitHub: {.val {x}}") } list( repo_spec = make_spec(owner = dat$repo_owner, repo = dat$repo_name), host = glue("https://{dat$host}") ) } } # Can be called in contexts where we have already asked user to choose between # origin and upstsream and, therefore, we know the remote URL. We parse it # regardless, because: # (1) Could be SSH not HTTPS # (2) Could be hosted on GHE not github.com github_url_from_git_remotes <- function(url = NULL) { if (is.null(url)) { tr <- tryCatch(target_repo(github_get = NA), error = function(e) NULL) if (is.null(tr)) { return() } url <- tr$url } parsed <- parse_github_remotes(url) glue_data_chr(parsed, "https://{host}/{repo_owner}/{repo_name}") } #' Gather LOCAL data on GitHub-associated remotes #' #' Creates a data frame where each row represents a GitHub-associated remote. #' The data frame is initialized via `gert::git_remote_list()`, possibly #' filtered for specific remote names. The remote URLs are parsed into parts, #' like `host` and `repo_owner`. 
This is filtered again for rows where the #' `host` appears to be a GitHub deployment (currently a crude search for #' "github"). Some of these parts are recombined or embellished to get new #' columns (`host_url`, `api_url`, `repo_spec`). All operations are entirely #' mechanical and local. #' #' @param these Intersect the list of remotes with `these` remote names. To keep #' all remotes, use `these = NULL` or `these = character()`. #' @param x Data frame with character columns `name` and `url`. Exposed as an #' argument for internal reasons. It's so we can call the functions that #' marshal info about GitHub remotes with 0-row input to obtain a properly #' typed template without needing a Git repo or calling GitHub. We just want #' to get a data frame with zero rows, but with the column names and types #' implicit in our logic. #' @keywords internal #' @noRd github_remote_list <- function(these = c("origin", "upstream"), x = NULL) { x <- x %||% gert::git_remote_list(repo = git_repo()) check_character(these, allow_null = TRUE) check_data_frame(x) check_character(x$name) check_character(x$url) if (length(these) > 0) { x <- x[x$name %in% these, ] } parsed <- parse_github_remotes(set_names(x$url, x$name)) # TODO: generalize here for GHE hosts that don't include 'github' is_github <- grepl("github", parsed$host) parsed <- parsed[is_github, ] parsed$remote <- parsed$name parsed$host_url <- glue_chr("https://{parsed$host}") parsed$api_url <- map_chr(parsed$host_url, get_apiurl) parsed$repo_spec <- make_spec(parsed$repo_owner, parsed$repo_name) parsed[c( "remote", "url", "host_url", "api_url", "host", "protocol", "repo_owner", "repo_name", "repo_spec" )] } #' Gather LOCAL and (maybe) REMOTE data on GitHub-associated remotes #' #' Creates a data frame where each row represents a GitHub-associated remote, #' starting with the output of `github_remote_list()` (local data). This #' function's job is to (maybe) add information we can only get from the GitHub #' API. If `github_get = FALSE`, we don't even attempt to call the API. #' Otherwise, we try and will succeed if gh discovers a suitable token. The #' resulting data, even if the API data is absent, is massaged into a data #' frame. #' #' @inheritParams github_remote_list #' @param github_get Whether to attempt to get repo info from the GitHub API. We #' try for `NA` (the default) and `TRUE`. If we aren't successful, we proceed #' anyway for `NA` but error for `TRUE`. When `FALSE`, no attempt is made to #' call the API. #' @keywords internal #' @noRd github_remotes <- function(these = c("origin", "upstream"), github_get = NA, x = NULL) { grl <- github_remote_list(these = these, x = x) get_gh_repo <- function(repo_owner, repo_name, api_url = "https://api.github.com") { if (isFALSE(github_get)) { f <- function(...) list() } else { f <- purrr::possibly(gh::gh, otherwise = list()) } f( "GET /repos/{owner}/{repo}", owner = repo_owner, repo = repo_name, .api_url = api_url ) } repo_info <- purrr::pmap( grl[c("repo_owner", "repo_name", "api_url")], get_gh_repo ) # NOTE: these can be two separate matters: # 1. Did we call the GitHub API? Means we know `is_fork` and the parent repo. # 2. If so, did we call it with auth? Means we know if we can push. grl$github_got <- map_lgl(repo_info, ~ length(.x) > 0) if (isTRUE(github_get) && !all(grl$github_got)) { oops <- which(!grl$github_got) oops_remotes <- grl$remote[oops] oops_hosts <- unique(grl$host[oops]) ui_abort(c( "Unable to get GitHub info for these remotes: {.val {oops_remotes}}.", "Are we offline? 
Is GitHub down? Has the repo been deleted?", "Otherwise, you probably need to configure a personal access token (PAT) for {.val {oops_hosts}}.", "See {.run usethis::gh_token_help()} for advice." )) } grl$default_branch <- map_chr(repo_info, "default_branch", .default = NA) grl$is_fork <- map_lgl(repo_info, "fork", .default = NA) # `permissions` is an example of data that is not present if the request # did not include a PAT grl$can_push <- map_lgl(repo_info, c("permissions", "push"), .default = NA) grl$can_admin <- map_lgl(repo_info, c("permissions", "admin"), .default = NA) grl$perm_known <- !is.na(grl$can_push) grl$parent_repo_owner <- map_chr(repo_info, c("parent", "owner", "login"), .default = NA) grl$parent_repo_name <- map_chr(repo_info, c("parent", "name"), .default = NA) grl$parent_repo_spec <- make_spec(grl$parent_repo_owner, grl$parent_repo_name) parent_info <- purrr::pmap( set_names( grl[c("parent_repo_owner", "parent_repo_name", "api_url")], ~ sub("parent_", "", .x) ), get_gh_repo ) grl$can_push_to_parent <- map_lgl(parent_info, c("permissions", "push"), .default = NA) grl } #' Classify the GitHub remote configuration #' #' @description #' Classify the active project's GitHub remote situation, so diagnostic and #' other downstream functions can decide whether to proceed / abort / complain & #' offer to fix. #' We only consider the remotes where: #' * Name is `origin` or `upstream` and the remote URL "looks like github" #' (github.com or a GHE deployment) #' #' We have to call the GitHub API to fully characterize the GitHub remote #' situation. That's the only way to learn if the user can push to a remote, #' whether a remote is a fork, and which repo is the parent of a fork. #' `github_get` controls whether we make these API calls. #' #' Some functions can get by with the information that's available locally, i.e. #' we can use simple logic to decide whether to target `origin` or `upstream` or #' present the user with a choice. We can set `github_get = FALSE` in this case. #' Other functions, like the `pr_*()` functions, are more demanding and we'll #' always determine the config with `github_get = TRUE`. #' #' Most usethis functions should call the higher-level functions `target_repo()` #' or `target_repo_spec()`. #' #' Only functions that really need full access to the GitHub remote config #' should call this directly. Ways to work with a config: #' * `cfg <- github_remote_config(github_get = )` #' * `check_for_bad_config(cfg)` errors for obviously bad configs (by default) #' or you can specify the configs considered to be bad #' * Emit a custom message then call `stop_bad_github_remote_config()` directly #' * If the config is suboptimal-but-supported, use #' `ui_github_remote_config_wat()` to educate the user and give them a chance #' to back out. #' #' Fields in an instance of `github_remote_config`: #' * `type`: explained below #' * `pr_ready`: Logical. Do the `pr_*()` functions support it? #' * `desc`: A description used in messages and menus. #' * `origin`: Information about the `origin` GitHub remote. #' * `upstream`: Information about the `upstream` GitHub remote. #' #' Possible GitHub remote configurations, the common cases: #' * no_github: No `origin`, no `upstream`. #' * ours: `origin` exists, is not a fork, and we can push to it. Owner of #' `origin` could be current user, another user, or an org. No `upstream`. #' - Less common variant: `upstream` exists, `origin` does not, and we can #' push to `upstream`. The fork-ness of `upstream` is not consulted. 
#' * fork: `origin` exists and we can push to it. `origin` is a fork of the repo #' configured as `upstream`. We may or may not be able to push to `upstream`. #' * theirs: Exactly one of `origin` and `upstream` exist and we can't push to #' it. The fork-ness of this remote repo is not consulted. #' #' Possible GitHub remote configurations, the peculiar ones: #' * fork_upstream_is_not_origin_parent: `origin` exists, it's a fork, but its #' parent repo is not configured as `upstream`. Either there's no `upstream` #' or `upstream` exists but it's not the parent of `origin`. #' * fork_cannot_push_origin: `origin` is a fork and its parent is configured #' as `upstream`. But we can't push to `origin`. #' * upstream_but_origin_is_not_fork: `origin` and `upstream` both exist, but #' `origin` is not a fork of anything and, specifically, it's not a fork of #' `upstream`. #' #' Remote configuration "guesses" we apply when `github_get = FALSE` or when #' we make unauthorized requests (no PAT found) and therefore have no info on #' permissions #' * maybe_ours_or_theirs: Exactly one of `origin` and `upstream` exists. #' * maybe_fork: Both `origin` and `upstream` exist. #' #' @inheritParams github_remotes #' @keywords internal #' @noRd new_github_remote_config <- function() { ptype <- github_remotes( x = data.frame(name = character(), url = character(), stringsAsFactors = FALSE) ) # 0-row df --> a well-named list of properly typed NAs ptype <- map(ptype, ~ c(NA, .x)) structure( list( type = NA_character_, host_url = NA_character_, pr_ready = FALSE, desc = "Unexpected remote configuration.", origin = c(name = "origin", is_configured = FALSE, ptype), upstream = c(name = "upstream", is_configured = FALSE, ptype) ), class = "github_remote_config" ) } github_remote_config <- function(github_get = NA) { cfg <- new_github_remote_config() grl <- github_remotes(github_get = github_get) if (nrow(grl) == 0) { return(cfg_no_github(cfg)) } cfg$origin$is_configured <- "origin" %in% grl$remote cfg$upstream$is_configured <- "upstream" %in% grl$remote single_remote <- xor(cfg$origin$is_configured, cfg$upstream$is_configured) if (!single_remote) { if (length(unique(grl$host)) != 1) { ui_abort(c( "Internal error: Multiple GitHub hosts.", "{.val {grl$host}}" )) } if (length(unique(grl$github_got)) != 1) { ui_abort(c( "Internal error: Got GitHub API info for some remotes, but not all.", "Do all the remotes still exist? Do you still have access?" 
)) } if (length(unique(grl$perm_known)) != 1) { ui_abort(" Internal error: Know GitHub permissions for some remotes, but not all.") } } cfg$host_url <- unique(grl$host_url) github_got <- any(grl$github_got) perm_known <- any(grl$perm_known) if (cfg$origin$is_configured) { cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) } if (cfg$upstream$is_configured) { cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) } if (github_got && !single_remote) { cfg$origin$parent_is_upstream <- identical(cfg$origin$parent_repo_spec, cfg$upstream$repo_spec) } if (!github_got || !perm_known) { if (single_remote) { return(cfg_maybe_ours_or_theirs(cfg)) } else { return(cfg_maybe_fork(cfg)) } } # `github_got` must be TRUE # `perm_known` must be TRUE # origin only if (single_remote && cfg$origin$is_configured) { if (cfg$origin$is_fork) { if (cfg$origin$can_push) { return(cfg_fork_upstream_is_not_origin_parent(cfg)) } else { return(cfg_theirs(cfg)) } } else { if (cfg$origin$can_push) { return(cfg_ours(cfg)) } else { return(cfg_theirs(cfg)) } } } # upstream only if (single_remote && cfg$upstream$is_configured) { if (cfg$upstream$can_push) { return(cfg_ours(cfg)) } else { return(cfg_theirs(cfg)) } } # origin and upstream if (cfg$origin$is_fork) { if (cfg$origin$parent_is_upstream) { if (cfg$origin$can_push) { return(cfg_fork(cfg)) } else { return(cfg_fork_cannot_push_origin(cfg)) } } else { return(cfg_fork_upstream_is_not_origin_parent(cfg)) } } else { return(cfg_upstream_but_origin_is_not_fork(cfg)) } } #' Select a target (GitHub) repo #' #' @description #' Returns information about ONE GitHub repository. Used when we need to #' designate which repo we will, e.g., open an issue on or activate a CI service #' for. This information might be used in a GitHub API request or to form URLs. #' #' Examples: #' * Badge URLs #' * URLs where you can activate a CI service #' * URLs for DESCRIPTION fields such as URL and BugReports #' `target_repo()` passes `github_get` along to `github_remote_config()`. If #' `github_get = TRUE`, `target_repo()` will error for configs other than #' `"ours"` or `"fork"`. `target_repo()` always errors for bad configs. If #' `github_get = NA` or `FALSE`, the "maybe" configs are tolerated. #' #' `target_repo_spec()` is a less capable function for when you just need an #' `OWNER/REPO` spec. Currently, it does not set or offer control over #' `github_get`, although I've considered explicitly setting `github_get = #' FALSE` or adding this argument, defaulting to `FALSE`. #' #' @inheritParams github_remotes #' @param cfg An optional GitHub remote configuration. Used to get the target #' repo when the function had some need for the full config. #' @param role We use "source" to mean the principal repo where a project's #' development happens. We use "primary" to mean the principal repo this #' particular user interacts with or has the greatest power over. They can be #' the same or different. Examples: #' * For a personal project you own, "source" and "primary" are the same. #' Presumably the `origin` remote. #' * For a collaboratively developed project, an outside contributor must create #' a fork in order to make a PR. For such a person, their fork is "primary" #' (presumably `origin`) and the original repo that they forked is "source" #' (presumably `upstream`). #' This is *almost* consistent with terminology used by the GitHub API. A fork #' has a "source repo" and a "parent repo", which are usually the same. 
They #' only differ when working with a fork of a repo that is itself a fork. In this #' rare case, the parent is the immediate fork parent and the source is the #' ur-parent, i.e. the root of this particular tree. The source repo is not a #' fork. #' @param ask In some configurations, if `ask = TRUE` and we're in an #' interactive session, user gets a choice between `origin` and `upstream`. #' @keywords internal #' @noRd target_repo <- function(cfg = NULL, github_get = NA, role = c("source", "primary"), ask = is_interactive(), ok_configs = c("ours", "fork", "theirs")) { cfg <- cfg %||% github_remote_config(github_get = github_get) stopifnot(inherits(cfg, "github_remote_config")) role <- match.arg(role) check_for_bad_config(cfg) if (isTRUE(github_get)) { check_for_config(cfg, ok_configs = ok_configs) } # upstream only if (cfg$upstream$is_configured && !cfg$origin$is_configured) { return(cfg$upstream) } # origin only if (cfg$origin$is_configured && !cfg$upstream$is_configured) { return(cfg$origin) } if (!ask || !is_interactive()) { return(switch( role, source = cfg$upstream, primary = cfg$origin )) } choices <- c( origin = ui_pre_glue("<> = {.val origin}"), upstream = ui_pre_glue("<> = {.val upstream}") ) choices_formatted <- map_chr(choices, cli::format_inline) title <- "Which repo should we target?" choice <- utils::menu(choices_formatted, graphics = FALSE, title = title) cfg[[names(choices)[choice]]] } target_repo_spec <- function(role = c("source", "primary"), ask = is_interactive()) { tr <- target_repo(role = match.arg(role), ask = ask) tr$repo_spec } # formatting github remote configurations for humans --------------------------- pre_format_remote <- function(remote) { effective_spec <- function(remote) { if (remote$is_configured) { ui_pre_glue("{.val <>}") } else { ui_special("not configured") } } push_clause <- function(remote) { if (!remote$is_configured || is.na(remote$can_push)) { return() } if (remote$can_push) " (can push)" else " (can not push)" } out <- c( glue("{remote$name} = {effective_spec(remote)}"), push_clause(remote), if (isTRUE(remote$is_fork)) { ui_pre_glue(" = fork of {.val <>}") } ) glue_collapse(out) } pre_format_fields <- function(cfg) { list( type = ui_pre_glue("Type = {.val <>}"), host_url = ui_pre_glue("Host = {.val <>}"), # extra brackets here ensure value is formatted as logical (vs string) pr_ready = ui_pre_glue("Config supports a pull request = {.val {<>}}"), origin = pre_format_remote(cfg$origin), upstream = pre_format_remote(cfg$upstream), desc = cfg$desc ) } #' @export format.github_remote_config <- function(x, ...) { x_fmt <- pre_format_fields(x) x_fmt$desc <- map_chr(x_fmt$desc, cli::format_inline) x_fmt <- purrr::map_if(x_fmt, function(x) length(x) == 1, cli::format_inline) out <- unlist(unname(x_fmt)) nms <- names2(out) nms <- ifelse(nzchar(nms), nms, "*") names(out) <- nms out } #' @export print.github_remote_config <- function(x, ...) 
{ withr::local_options(usethis.quiet = FALSE) ui_bullets(format(x, ...)) invisible(x) } # refines output of format_fields() to create input better suited to # ui_github_remote_config_wat() and stop_bad_github_remote_config() github_remote_config_wat <- function(cfg, context = c("menu", "abort")) { context <- match.arg(context) adjective <- switch(context, menu = "Unexpected", abort = "Unsupported") out <- format(cfg) type_idx <- grep("^Type", out) out[type_idx] <- ui_pre_glue(" <> GitHub remote configuration: {.val <>}") names(out)[type_idx] <- "x" pr_idx <- grep("pull request", out) out <- out[-pr_idx] unlist(out) } # returns TRUE if user selects "no" --> exit the calling function # return FALSE if user select "yes" --> keep going, they've been warned ui_github_remote_config_wat <- function(cfg) { ui_nah( github_remote_config_wat(cfg, context = "menu"), yes = "Yes, I want to proceed. I know what I'm doing.", no = "No, I want to stop and straighten out my GitHub remotes first.", shuffle = FALSE ) } stop_bad_github_remote_config <- function(cfg) { ui_abort( github_remote_config_wat(cfg, context = "abort"), class = "usethis_error_bad_github_remote_config", cfg = cfg ) } stop_maybe_github_remote_config <- function(cfg) { msg <- c( ui_pre_glue(" Pull request functions can't work with GitHub remote configuration: {.val <>}."), "The most likely problem is that we aren't discovering your GitHub personal access token.", github_remote_config_wat(cfg) ) idx <- grep("Unexpected GitHub remote configuration", msg) msg <- msg[-idx] ui_abort( message = unlist(msg), class = "usethis_error_invalid_pr_config", cfg = cfg ) } check_for_bad_config <- function(cfg, bad_configs = c( "no_github", "fork_upstream_is_not_origin_parent", "fork_cannot_push_origin", "upstream_but_origin_is_not_fork" )) { if (cfg$type %in% bad_configs) { stop_bad_github_remote_config(cfg) } invisible() } check_for_maybe_config <- function(cfg) { maybe_configs <- grep("^maybe_", all_configs(), value = TRUE) if (cfg$type %in% maybe_configs) { stop_maybe_github_remote_config(cfg) } invisible() } check_for_config <- function(cfg = NULL, ok_configs = c("ours", "fork", "theirs")) { cfg <- cfg %||% github_remote_config(github_get = TRUE) stopifnot(inherits(cfg, "github_remote_config")) if (cfg$type %in% ok_configs) { return(invisible(cfg)) } check_for_maybe_config(cfg) bad_configs <- grep("^maybe_", all_configs(), invert = TRUE, value = TRUE) bad_configs <- setdiff(bad_configs, ok_configs) check_for_bad_config(cfg, bad_configs = bad_configs) ui_abort(" Internal error: Unexpected GitHub remote configuration: {.val {cfg$type}}.") } check_can_push <- function(tr = target_repo(github_get = TRUE), objective = "for this operation") { if (isTRUE(tr$can_push)) { return(invisible()) } ui_abort(" You don't seem to have push access for {.val {tr$repo_spec}}, which is required {objective}.") } # github remote configurations ------------------------------------------------- all_configs <- function() { c( "no_github", "ours", "theirs", "maybe_ours_or_theirs", "fork", "maybe_fork", "fork_cannot_push_origin", "fork_upstream_is_not_origin_parent", "upstream_but_origin_is_not_fork" ) } read_more <- function() { c( "i" = "Read more about the GitHub remote configurations that usethis supports at:", " " = "{.url https://happygitwithr.com/common-remote-setups.html}." ) } read_more_maybe <- function() { c( "i" = "Read more about what this GitHub remote configuration means at:", " " = "{.url https://happygitwithr.com/common-remote-setups.html}." 
) } cfg_no_github <- function(cfg) { utils::modifyList( cfg, list( type = "no_github", pr_ready = FALSE, desc = c( "!" = "Neither {.val origin} nor {.val upstream} is a GitHub repo.", read_more() ) ) ) } cfg_ours <- function(cfg) { utils::modifyList( cfg, list( type = "ours", pr_ready = TRUE, desc = c( "i" = "{.val origin} is both the source and primary repo.", read_more() ) ) ) } cfg_theirs <- function(cfg) { configured <- if (cfg$origin$is_configured) "origin" else "upstream" utils::modifyList( cfg, list( type = "theirs", pr_ready = FALSE, desc = c( "!" = ui_pre_glue(" The only configured GitHub remote is {.val <>}, which you cannot push to."), "i" = "If your goal is to make a pull request, you must fork-and-clone.", "i" = "{.fun usethis::create_from_github} can do this.", read_more() ) ) ) } cfg_maybe_ours_or_theirs <- function(cfg) { if (cfg$origin$is_configured) { configured <- "origin" not_configured <- "upstream" } else { configured <- "upstream" not_configured <- "origin" } utils::modifyList( cfg, list( type = "maybe_ours_or_theirs", pr_ready = NA, desc = c( "!" = ui_pre_glue(" {.val <>} is a GitHub repo and {.val <>} is either not configured or is not a GitHub repo."), "i" = "We may be offline or you may need to configure a GitHub personal access token.", "i" = "{.run usethis::gh_token_help()} can help with that.", read_more_maybe() ) ) ) } cfg_fork <- function(cfg) { utils::modifyList( cfg, list( type = "fork", pr_ready = TRUE, desc = c( "i" = ui_pre_glue(" {.val origin} is a fork of {.val <>}, which is configured as the {.val upstream} remote."), read_more() ) ) ) } cfg_maybe_fork <- function(cfg) { utils::modifyList( cfg, list( type = "maybe_fork", pr_ready = NA, desc = c( "!" = ui_pre_glue(" Both {.val origin} and {.val upstream} appear to be GitHub repos. However, we can't confirm their relationship to each other (e.g., fork and fork parent) or your permissions (e.g. push access)."), "i" = "We may be offline or you may need to configure a GitHub personal access token.", "i" = "{.run usethis::gh_token_help()} can help with that.", read_more_maybe() ) ) ) } cfg_fork_cannot_push_origin <- function(cfg) { utils::modifyList( cfg, list( type = "fork_cannot_push_origin", pr_ready = FALSE, desc = c( "!" = ui_pre_glue(" The {.val origin} remote is a fork, but you can't push to it."), read_more() ) ) ) } cfg_fork_upstream_is_not_origin_parent <- function(cfg) { utils::modifyList( cfg, list( type = "fork_upstream_is_not_origin_parent", pr_ready = FALSE, desc = c( "!" = ui_pre_glue(" The {.val origin} GitHub remote is a fork, but its parent is not configured as the {.val upstream} remote."), read_more() ) ) ) } cfg_upstream_but_origin_is_not_fork <- function(cfg) { utils::modifyList( cfg, list( type = "upstream_but_origin_is_not_fork", pr_ready = FALSE, desc = c( "!" 
= ui_pre_glue(" Both {.val origin} and {.val upstream} are GitHub remotes, but {.val origin} is not a fork and, in particular, is not a fork of {.val upstream}."), read_more() ) ) ) } # construct instances of `github_remote_config` for dev/testing purposes-------- new_no_github <- function() { cfg <- new_github_remote_config() cfg_no_github(cfg) } new_ours <- function() { remotes <- data.frame(name = "origin", url = "https://github.com/OWNER/REPO.git") grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <- grl$perm_known <- TRUE grl$default_branch <- "DEFAULT_BRANCH" grl$is_fork <- FALSE grl$can_push <- grl$can_admin <- TRUE cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$host_url <- grl$host_url cfg$origin$is_configured <- TRUE cfg_ours(cfg) } new_theirs <- function() { remotes <- data.frame(name = "origin", url = "https://github.com/OWNER/REPO.git") grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <- grl$perm_known <- TRUE grl$default_branch <- "DEFAULT_BRANCH" grl$can_push <- grl$can_admin <- FALSE cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$host_url <- grl$host_url cfg$origin$is_configured <- TRUE cfg_theirs(cfg) } new_fork <- function() { remotes <- data.frame( name = c("origin", "upstream"), url = c("https://github.com/CONTRIBUTOR/REPO.git", "https://github.com/OWNER/REPO.git") ) grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <- grl$perm_known <- TRUE grl$default_branch <- "DEFAULT_BRANCH" grl$is_fork <- c(TRUE, FALSE) grl$parent_repo_owner <- c("OWNER", NA) grl$parent_repo_name <- c("REPO", NA) grl$can_push_to_parent <- c(FALSE, NA) grl$parent_repo_spec <- make_spec(grl$parent_repo_owner, grl$parent_repo_name) grl$can_push <- grl$can_admin <- c(TRUE, FALSE) cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) cfg$host_url <- grl$host_url[1] cfg$origin$is_configured <- cfg$upstream$is_configured <- TRUE cfg$origin$parent_is_upstream <- TRUE cfg_fork(cfg) } new_maybe_ours_or_theirs <- function() { remotes <- data.frame(name = "origin", url = "https://github.com/OWNER/REPO.git") grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <-grl$perm_known <- FALSE grl$default_branch <- "DEFAULT_BRANCH" cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$host_url <- grl$host_url cfg$origin$is_configured <- TRUE cfg_maybe_ours_or_theirs(cfg) } new_maybe_fork <- function() { remotes <- data.frame( name = c("origin", "upstream"), url = c("https://github.com/CONTRIBUTOR/REPO.git", "https://github.com/OWNER/REPO.git") ) grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <-grl$perm_known <- FALSE grl$default_branch <- "DEFAULT_BRANCH" cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) cfg$host_url <- grl$host_url[1] cfg$origin$is_configured <- cfg$upstream$is_configured <- TRUE cfg_maybe_fork(cfg) } new_fork_cannot_push_origin <- function() { remotes <- data.frame( name = c("origin", "upstream"), url = c("https://github.com/CONTRIBUTOR/REPO.git", "https://github.com/OWNER/REPO.git") ) grl <- github_remotes(github_get = FALSE, x = remotes) 
grl$github_got <-grl$perm_known <- TRUE grl$default_branch <- "DEFAULT_BRANCH" cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) cfg$host_url <- grl$host_url[1] cfg$origin$is_configured <- cfg$upstream$is_configured <- TRUE cfg$origin$parent_is_upstream <- FALSE cfg_fork_cannot_push_origin(cfg) } new_fork_upstream_is_not_origin_parent<- function() { remotes <- data.frame( name = c("origin", "upstream"), url = c("https://github.com/CONTRIBUTOR/REPO.git", "https://github.com/OLD_OWNER/REPO.git") ) grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <- grl$perm_known <- TRUE grl$default_branch <- "DEFAULT_BRANCH" grl$is_fork <- c(TRUE, FALSE) grl$parent_repo_owner <- c("NEW_OWNER", NA) grl$parent_repo_name <- c("REPO", NA) grl$can_push_to_parent <- c(FALSE, NA) grl$parent_repo_spec <- make_spec(grl$parent_repo_owner, grl$parent_repo_name) grl$can_push <- grl$can_admin <- c(TRUE, FALSE) cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) cfg$host_url <- grl$host_url[1] cfg$origin$is_configured <- cfg$upstream$is_configured <- TRUE cfg_fork_upstream_is_not_origin_parent(cfg) } new_upstream_but_origin_is_not_fork <- function() { remotes <- data.frame( name = c("origin", "upstream"), url = c("https://github.com/CONTRIBUTOR/REPO.git", "https://github.com/OWNER/REPO.git") ) grl <- github_remotes(github_get = FALSE, x = remotes) grl$github_got <-grl$perm_known <- TRUE grl$default_branch <- "DEFAULT_BRANCH" grl$is_fork <- FALSE cfg <- new_github_remote_config() cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) cfg$host_url <- grl$host_url[1] cfg$origin$is_configured <- cfg$upstream$is_configured <- TRUE cfg$origin$parent_is_upstream <- FALSE cfg_upstream_but_origin_is_not_fork(cfg) } usethis/R/namespace.R0000644000176200001440000000146414651000165014221 0ustar liggesusers#' Use a basic `NAMESPACE` #' #' If `roxygen` is `TRUE` generates an empty `NAMESPACE` that exports nothing; #' you'll need to explicitly export functions with `@export`. If `roxygen` #' is `FALSE`, generates a default `NAMESPACE` that exports all functions #' except those that start with `.`. #' #' @param roxygen Do you plan to manage `NAMESPACE` with roxygen2? #' @seealso The [namespace #' chapter](https://r-pkgs.org/dependencies-mindset-background.html#sec-dependencies-namespace) #' of [R Packages](https://r-pkgs.org). #' @export use_namespace <- function(roxygen = TRUE) { check_is_package("use_namespace()") path <- proj_path("NAMESPACE") if (roxygen) { write_over(path, c("# Generated by roxygen2: do not edit by hand", "")) } else { write_over(path, 'exportPattern("^[^\\\\.]")') } } usethis/R/jenkins.R0000644000176200001440000000116514651000165013724 0ustar liggesusers#' Create Jenkinsfile for Jenkins CI Pipelines #' #' `use_jenkins()` adds a basic Jenkinsfile for R packages to the project root #' directory. The Jenkinsfile stages take advantage of calls to `make`, and so #' calling this function will also run `use_make()` if a Makefile does not #' already exist at the project root. #' #' @seealso The [documentation on Jenkins #' Pipelines](https://www.jenkins.io/doc/book/pipeline/jenkinsfile/). 
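#'
#' @examples
#' \dontrun{
#' # A minimal usage sketch: run inside an active package or project.
#' use_jenkins()
#' }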
#' @seealso [use_make()] #' @export use_jenkins <- function() { use_make() use_template( "Jenkinsfile", data = list(name = project_name()) ) use_build_ignore("Jenkinsfile") } usethis/R/data-table.R0000644000176200001440000000255014651000165014260 0ustar liggesusers#' Prepare for importing data.table #' #' `use_data_table()` imports the `data.table()` function from the data.table #' package, as well as several important symbols: `:=`, `.SD`, `.BY`, `.N`, #' `.I`, `.GRP`, `.NGRP`, `.EACHI`. This is a minimal setup and you can learn #' much more in the "Importing data.table" vignette: #' `https://rdatatable.gitlab.io/data.table/articles/datatable-importing.html`. #' In addition to importing these functions, `use_data_table()` also blocks the #' usage of data.table in the `Depends` field of the `DESCRIPTION` file; #' `data.table` should be used as an _imported_ or _suggested_ package only. See #' this [discussion](https://github.com/Rdatatable/data.table/issues/3076). #' #' @export use_data_table <- function() { check_is_package("use_data_table()") check_installed("data.table") check_uses_roxygen("use_data_table()") desc <- proj_desc() deps <- desc$get_deps() if (any(deps$type == "Depends" & deps$package == "data.table")) { ui_bullets(c( "!" = "{.pkg data.table} should be in {.field Imports} or {.field Suggests}, not {.field Depends}!", "v" = "Removing {.pkg data.table} from {.field Depends}." )) desc$del_dep("data.table", "Depends") desc$write() } use_import_from( "data.table", c("data.table", ":=", ".SD", ".BY", ".N", ".I", ".GRP", ".NGRP", ".EACHI") ) } usethis/R/write.R0000644000176200001440000000747214651000165013424 0ustar liggesusers#' Write into or over a file #' #' Helpers to write into or over a new or pre-existing file. Designed mostly for #' for internal use. File is written with UTF-8 encoding. #' #' @name write-this #' @param path Path to target file. It is created if it does not exist, but the #' parent directory must exist. #' @param lines Character vector of lines. For `write_union()`, these are lines #' to add to the target file, if not already present. For `write_over()`, #' these are the exact lines desired in the target file. #' @param quiet Logical. Whether to message about what is happening. #' @return Logical indicating whether a write occurred, invisibly. #' @keywords internal #' #' @examples #' \dontshow{ #' .old_wd <- setwd(tempdir()) #' } #' write_union("a_file", letters[1:3]) #' readLines("a_file") #' write_union("a_file", letters[1:5]) #' readLines("a_file") #' #' write_over("another_file", letters[1:3]) #' readLines("another_file") #' write_over("another_file", letters[1:3]) #' \dontrun{ #' ## will error if user isn't present to approve the overwrite #' write_over("another_file", letters[3:1]) #' } #' #' ## clean up #' file.remove("a_file", "another_file") #' \dontshow{ #' setwd(.old_wd) #' } NULL #' @describeIn write-this writes lines to a file, taking the union of what's #' already there, if anything, and some new lines. Note, there is no explicit #' promise about the line order. Designed to modify simple config files like #' `.Rbuildignore` and `.gitignore`. 
#' @export write_union <- function(path, lines, quiet = FALSE) { check_name(path) check_character(lines) check_bool(quiet) path <- user_path_prep(path) if (file_exists(path)) { existing_lines <- read_utf8(path) } else { existing_lines <- character() } new <- setdiff(lines, existing_lines) if (length(new) == 0) { return(invisible(FALSE)) } if (!quiet) { ui_bullets(c("v" = "Adding {.val {new}} to {.path {pth(path)}}.")) } all <- c(existing_lines, new) write_utf8(path, all) } #' @describeIn write-this writes a file with specific lines, creating it if #' necessary or overwriting existing, if proposed contents are not identical #' and user is available to give permission. #' @param overwrite Force overwrite of existing file? #' @export write_over <- function(path, lines, quiet = FALSE, overwrite = FALSE) { check_name(path) check_character(lines) stopifnot(length(lines) > 0) check_bool(quiet) check_bool(overwrite) path <- user_path_prep(path) if (same_contents(path, lines)) { return(invisible(FALSE)) } if (overwrite || can_overwrite(path)) { if (!quiet) { ui_bullets(c("v" = "Writing {.path {pth(path)}}.")) } write_utf8(path, lines) } else { if (!quiet) { ui_bullets(c("i" = "Leaving {.path {pth(path)}} unchanged.")) } invisible(FALSE) } } read_utf8 <- function(path, n = -1L) { base::readLines(path, n = n, encoding = "UTF-8", warn = FALSE) } write_utf8 <- function(path, lines, append = FALSE, line_ending = NULL) { check_name(path) check_character(lines) file_mode <- if (append) "ab" else "wb" con <- file(path, open = file_mode, encoding = "utf-8") withr::defer(close(con)) if (is.null(line_ending)) { if (is_in_proj(path)) { # path is in active project line_ending <- proj_line_ending() } else if (possibly_in_proj(path)) { # path is some other project line_ending <- with_project(proj_find(path), proj_line_ending(), quiet = TRUE) } else { line_ending <- platform_line_ending() } } # convert embedded newlines lines <- gsub("\r?\n", line_ending, lines) base::writeLines(enc2utf8(lines), con, sep = line_ending, useBytes = TRUE) invisible(TRUE) } same_contents <- function(path, contents) { if (!file_exists(path)) { return(FALSE) } identical(read_utf8(path), contents) } usethis/R/addin.R0000644000176200001440000000173114651000165013341 0ustar liggesusers#' Add minimal RStudio Addin binding #' #' This function helps you add a minimal #' [RStudio Addin](https://rstudio.github.io/rstudioaddins/) binding to #' `inst/rstudio/addins.dcf`. #' #' @param addin Name of the addin function, which should be defined in the #' `R` folder. 
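#'
#' @examples
#' \dontrun{
#' # A minimal usage sketch; "new_addin" (the default) stands in for a
#' # function you have defined in the R/ folder.
#' use_addin("new_addin")
#' }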
#' @inheritParams use_template #' #' @export use_addin <- function(addin = "new_addin", open = rlang::is_interactive()) { addin_dcf_path <- proj_path("inst", "rstudio", "addins.dcf") if (!file_exists(addin_dcf_path)) { create_directory(proj_path("inst", "rstudio")) file_create(addin_dcf_path) ui_bullets(c("v" = "Creating {.path {pth(addin_dcf_path)}}")) } addin_info <- render_template("addins.dcf", data = list(addin = addin)) addin_info[length(addin_info) + 1] <- "" write_utf8(addin_dcf_path, addin_info, append = TRUE) ui_bullets(c( "v" = "Adding binding to {.fun {addin}} to {.path addins.dcf}" )) if (open) { edit_file(addin_dcf_path) } invisible(TRUE) } usethis/R/positron.R0000644000176200001440000000007614651514262014150 0ustar liggesusersis_positron <- function() { Sys.getenv("POSITRON") == "1" } usethis/R/git-default-branch.R0000644000176200001440000005145714717524762015755 0ustar liggesusers#' Get or set the default Git branch #' #' @description #' The `git_default_branch*()` functions put some structure around the somewhat #' fuzzy (but definitely real) concept of the default branch. In particular, #' they support new conventions around the Git default branch name, globally or #' in a specific project / Git repository. #' #' @section Background on the default branch: #' #' Technically, Git has no official concept of the default branch. But in #' reality, almost all Git repos have an *effective default branch*. If there's #' only one branch, this is it! It is the branch that most bug fixes and #' features get merged in to. It is the branch you see when you first visit a #' repo on a site such as GitHub. On a Git remote, it is the branch that `HEAD` #' points to. #' #' Historically, `master` has been the most common name for the default branch, #' but `main` is an increasingly popular choice. #' #' @section `git_default_branch_configure()`: #' This configures `init.defaultBranch` at the global (a.k.a user) level. This #' setting determines the name of the branch that gets created when you make the #' first commit in a new Git repo. `init.defaultBranch` only affects the local #' Git repos you create in the future. #' #' @section `git_default_branch()`: #' This figures out the default branch of the current Git repo, integrating #' information from the local repo and, if applicable, the `upstream` or #' `origin` remote. If there is a local vs. remote mismatch, #' `git_default_branch()` throws an error with advice to call #' `git_default_branch_rediscover()` to repair the situation. #' #' For a remote repo, the default branch is the branch that `HEAD` points to. #' #' For the local repo, if there is only one branch, that must be the default! #' Otherwise we try to identify the relevant local branch by looking for #' specific branch names, in this order: #' * whatever the default branch of `upstream` or `origin` is, if applicable #' * `main` #' * `master` #' * the value of the Git option `init.defaultBranch`, with the usual deal where #' a local value, if present, takes precedence over a global (a.k.a. #' user-level) value #' #' @section `git_default_branch_rediscover()`: #' This consults an external authority -- specifically, the remote **source #' repo** on GitHub -- to learn the default branch of the current project / #' repo. If that doesn't match the apparent local default branch (for example, #' the project switched from `master` to `main`), we do the corresponding branch #' renaming in your local repo and, if relevant, in your fork. 
#' #' See for more about #' GitHub remote configurations and, e.g., what we mean by the source repo. This #' function works for the configurations `"ours"`, `"fork"`, and `"theirs"`. #' @section `git_default_branch_rename()`: #' Note: this only works for a repo that you effectively own. In terms of #' GitHub, you must own the **source repo** personally or, if #' organization-owned, you must have `admin` permission on the **source repo**. #' #' This renames the default branch in the **source repo** on GitHub and then #' calls `git_default_branch_rediscover()`, to make any necessary changes in the #' local repo and, if relevant, in your personal fork. #' #' See for more about #' GitHub remote configurations and, e.g., what we mean by the source repo. This #' function works for the configurations `"ours"`, `"fork"`, and `"no_github"`. #' #' Regarding `"no_github"`: Of course, this function does what you expect for a #' local repo with no GitHub remotes, but that is not the primary use case. #' @return Name of the default branch. #' @name git-default-branch NULL #' @export #' @rdname git-default-branch #' @examples #' \dontrun{ #' git_default_branch() #' } git_default_branch <- function() { git_default_branch_(github_remote_config()) } # If config is available, we can use it to avoid an additional lookup # on the GitHub API git_default_branch_ <- function(cfg) { repo <- git_repo() upstream <- git_default_branch_remote(cfg, "upstream") if (is.na(upstream$default_branch)) { origin <- git_default_branch_remote(cfg, "origin") if (is.na(origin$default_branch)) { db_source <- list() } else { db_source <- origin } } else { db_source <- upstream } db_local_with_source <- tryCatch( guess_local_default_branch(db_source$default_branch), error = function(e) NA_character_ ) # these error sub-classes and error data are for the benefit of git_sitrep() if (is.na(db_local_with_source) ) { if (length(db_source)) { ui_abort(c( "x" = "Default branch mismatch between local repo and remote.", "i" = "The default branch of the {.val {db_source$name}} remote is {.val {db_source$default_branch}}.", " " = "But the local repo has no branch named {.val {db_source$default_branch}}.", "_" = "Call {.run [git_default_branch_rediscover()](usethis::git_default_branch_rediscover())} to resolve this." ), class = "error_default_branch", db_source = db_source ) } else { ui_abort( "Can't determine the local repo's default branch.", class = "error_default_branch" ) } } # we learned a default branch from the local repo if (is.null(db_source$default_branch) || is.na(db_source$default_branch) || identical(db_local_with_source, db_source$default_branch)) { return(db_local_with_source) } # we learned a default branch from the source repo and it doesn't match # the local default branch ui_abort(c( "x" = "Default branch mismatch between local repo and remote.", "i" = "The default branch of the {.val {db_source$name}} remote is {.val {db_source$default_branch}}.", " " = "But the default branch of the local repo appears to be {.val {db_local_with_source}}.", "_" = "Call {.run [git_default_branch_rediscover()](usethis::git_default_branch_rediscover())} to resolve this." 
), class = "error_default_branch", db_source = db_source, db_local = db_local_with_source ) } # returns a whole data structure, because the caller needs the surrounding # context to produce a helpful error message git_default_branch_remote <- function(cfg, remote = "origin") { repo <- git_repo() out <- list( name = remote, is_configured = NA, url = NA_character_, repo_spec = NA_character_, default_branch = NA_character_ ) cfg_remote <- cfg[[remote]] if (!cfg_remote$is_configured) { out$is_configured <- FALSE return(out) } out$is_configured <- TRUE out$url <- cfg_remote$url if (!is.na(cfg_remote$default_branch)) { out$repo_spec <- cfg_remote$repo_spec out$default_branch <- cfg_remote$default_branch return(out) } # Fall back to pure git based approach out$default_branch <- tryCatch( { gert::git_fetch(remote = remote, repo = repo, verbose = FALSE) res <- gert::git_remote_ls(remote = remote, verbose = FALSE, repo = repo) path_file(res$symref[res$ref == "HEAD"]) }, error = function(e) NA_character_ ) out } default_branch_candidates <- function() { c( "main", "master", # we use `where = "de_facto"` so that one can configure init.defaultBranch # *locally* (which is unusual, but possible) in a repo that uses an # unconventional default branch name git_cfg_get("init.defaultBranch", where = "de_facto") ) } # `prefer` is available if you want to inject external information, such as # the default branch of a remote guess_local_default_branch <- function(prefer = NULL, verbose = FALSE) { repo <- git_repo() gb <- gert::git_branch_list(local = TRUE, repo = repo)[["name"]] if (length(gb) == 0) { ui_abort(c( "x" = "Can't find any local branches.", " " = "Do you need to make your first commit?" )) } candidates <- c(prefer, default_branch_candidates()) first_matched <- function(x, table) table[min(match(x, table), na.rm = TRUE)] if (length(gb) == 1) { db <- gb } else if (any(gb %in% candidates)) { db <- first_matched(gb, candidates) } else { # TODO: perhaps this should be classed, so I can catch it and distinguish # from the ui_abort() above, where there are no local branches. ui_abort(" Unable to guess which existing local branch plays the role of the default.") } if (verbose) { ui_bullets(c( "i" = "Local branch {.val {db}} appears to play the role of the default branch." )) } db } #' @export #' @rdname git-default-branch #' @param name Default name for the initial branch in new Git repositories. #' @examples #' \dontrun{ #' git_default_branch_configure() #' } git_default_branch_configure <- function(name = "main") { check_string(name) ui_bullets(c( "v" = "Configuring {.field init.defaultBranch} as {.val {name}}.", "i" = "Remember: this only affects repos you create in the future!" )) use_git_config(scope = "user", `init.defaultBranch` = name) invisible(name) } #' @export #' @rdname git-default-branch #' @param current_local_default Name of the local branch that is currently #' functioning as the default branch. If unspecified, this can often be #' inferred. #' @examples #' \dontrun{ #' git_default_branch_rediscover() #' #' # you can always explicitly specify the local branch that's been playing the #' # role of the default #' git_default_branch_rediscover("unconventional_default_branch_name") #' } git_default_branch_rediscover <- function(current_local_default = NULL) { rediscover_default_branch(old_name = current_local_default) } #' @export #' @rdname git-default-branch #' @param from Name of the branch that is currently functioning as the default #' branch. #' @param to New name for the default branch. 
#' @examples #' \dontrun{ #' git_default_branch_rename() #' #' # you can always explicitly specify one or both branch names #' git_default_branch_rename(from = "this", to = "that") #' } git_default_branch_rename <- function(from = NULL, to = "main") { repo <- git_repo() maybe_name(from) check_name(to) if (!is.null(from) && !gert::git_branch_exists(from, local = TRUE, repo = repo)) { ui_abort("Can't find existing branch named {.val {from}}.") } cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg, ok_configs = c("ours", "fork", "no_github")) if (cfg$type == "no_github") { from <- from %||% guess_local_default_branch(verbose = TRUE) if (from == to) { ui_bullets(c( "i" = "Local repo already has {.val {from}} as its default branch." )) } else { ui_bullets(c( "v" = "Moving local {.val {from}} branch to {.val {to}}." )) gert::git_branch_move(branch = from, new_branch = to, repo = repo) rstudio_git_tickle() report_fishy_files(old_name = from, new_name = to) } return(invisible(to)) } # cfg is now either fork or ours tr <- target_repo(cfg, role = "source", ask = FALSE) old_source_db <- tr$default_branch if (!isTRUE(tr$can_admin)) { ui_abort(" You don't seem to have {.field admin} permissions for the source repo {.val {tr$repo_spec}}, which is required to rename the default branch.") } old_local_db <- from %||% guess_local_default_branch(old_source_db, verbose = FALSE) if (old_local_db != old_source_db) { ui_bullets(c( "!" = "It's weird that the current default branch for your local repo and the source repo are different:", " " = "{.val {old_local_db}} (local) != {.val {old_source_db}} (source)" )) if (ui_nah( "Are you sure you want to proceed?", yes = "yes", no = "no", shuffle = FALSE)) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } source_update <- old_source_db != to if (source_update) { gh <- gh_tr(tr) gh( "POST /repos/{owner}/{repo}/branches/{from}/rename", from = old_source_db, new_name = to ) } if (source_update) { ui_bullets(c( "v" = "Default branch of the source repo {.val {tr$repo_spec}} has moved:", " " = "{.val {old_source_db}} {cli::symbol$arrow_right} {.val {to}}" )) } else { ui_bullets(c( "i" = "Default branch of source repo {.val {tr$repo_spec}} is {.val {to}}. Nothing to be done." )) } report_fishy_files(old_name = old_local_db, new_name = to) rediscover_default_branch(old_name = old_local_db, report_on_source = FALSE) } rediscover_default_branch <- function(old_name = NULL, report_on_source = TRUE) { maybe_name(old_name) # GitHub's official TODOs re: manually updating local environments # after a source repo renames the default branch: # git branch -m OLD-BRANCH-NAME NEW-BRANCH-NAME # git fetch origin # git branch -u origin/NEW-BRANCH-NAME NEW-BRANCH-NAME # git remote set-head origin -a # optionally # git remote prune origin # Note: they are assuming the relevant repo is known as origin, but it could # just as easily be, e.g., upstream. repo <- git_repo() if (!is.null(old_name) && !gert::git_branch_exists(old_name, local = TRUE, repo = repo)) { ui_abort("Can't find existing local branch named {.val {old_name}}.") } cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg) tr <- target_repo(cfg, role = "source", ask = FALSE) db <- tr$default_branch # goal, in Git-speak: git remote set-head -a # goal, for humans: learn and record the default branch (i.e. 
the target of # the symbolic-ref refs/remotes//HEAD) for the named remote # https://git-scm.com/docs/git-remote#Documentation/git-remote.txt-emset-headem # for very stale repos, a fetch is a necessary pre-requisite # I provide `refspec = db` to avoid fetching all refs, which can be VERY slow # for a repo like ggplot2 (several minutes, with no progress reporting) gert::git_fetch(remote = tr$name, refspec = db, verbose = FALSE, repo = repo) gert::git_remote_ls(remote = tr$name, verbose = FALSE, repo = repo) old_name <- old_name %||% guess_local_default_branch(db, verbose = FALSE) local_update <- old_name != db if (local_update) { # goal, in Git-speak: git branch -m gert::git_branch_move(branch = old_name, new_branch = db, repo = repo) rstudio_git_tickle() } # goal, in Git-speak: git branch -u / gert::git_branch_set_upstream( branch = db, upstream = glue("{tr$name}/{db}"), repo = repo ) # goal: get rid of old remote tracking branch, e.g. origin/master # goal, in Git-speak: git remote prune origin # I provide a refspec to avoid fetching all refs, which can be VERY slow # for a repo like ggplot2 (several minutes, with no progress reporting) gert::git_fetch( remote = tr$name, refspec = glue("refs/heads/{old_name}:refs/remotes/{tr$name}/{old_name}"), verbose = FALSE, repo = repo, prune = TRUE ) # for "ours" and "theirs", the source repo is the only remote on our radar and # we're done ingesting the default branch from the source repo # but for "fork", we also need to update # the fork = the user's primary repo = the remote known as origin if (cfg$type == "fork") { old_name_fork <- cfg$origin$default_branch fork_update <- old_name_fork != db if (fork_update) { gh <- gh_tr(cfg$origin) gh( "POST /repos/{owner}/{repo}/branches/{from}/rename", from = old_name_fork, new_name = db ) gert::git_fetch(remote = "origin", refspec = db, verbose = FALSE, repo = repo) gert::git_remote_ls(remote = "origin", verbose = FALSE, repo = repo) gert::git_fetch( remote = "origin", refspec = glue("refs/heads/{old_name}:refs/remotes/origin/{old_name}"), verbose = FALSE, repo = repo, prune = TRUE ) } } if (report_on_source) { ui_bullets(c( "i" = "Default branch of the source repo {.val {tr$repo_spec}} is {.val {db}}." )) } if (local_update) { ui_bullets(c( "v" = "Default branch of local repo has moved: {.val {old_name}} {cli::symbol$arrow_right} {.val {db}}" )) } else { ui_bullets(c( "i" = "Default branch of local repo is {.val {db}}. Nothing to be done." )) } if (cfg$type == "fork") { if (fork_update) { ui_bullets(c( "v" = "Default branch of your fork has moved: {.val {old_name_fork}} {cli::symbol$arrow_right} {.val {db}}" )) } else { ui_bullets(c( "i" = "Default branch of your fork is {.val {db}}. Nothing to be done." )) } } invisible(db) } challenge_non_default_branch <- function(details = "Are you sure you want to proceed?", default_branch = NULL) { actual <- git_branch() default_branch <- default_branch %||% git_default_branch() if (actual != default_branch) { if (ui_nah(c( "!" = "Current branch ({.val {actual}}) is not repo's default branch ({.val {default_branch}}).", " " = details ))) { ui_abort("Cancelling. Not on desired branch.") } } invisible() } report_fishy_files <- function(old_name = "master", new_name = "main") { ui_bullets(c( "_" = "Be sure to update files that refer to the default branch by name.", " " = "Consider searching within your project for {.val {old_name}}." 
)) # I don't want failure of a fishy file check to EVER cause # git_default_branch_rename() to fail and prevent the call to # git_default_branch_rediscover() # using a simple try() wrapper because these hints are just "nice to have" try(fishy_github_actions(new_name = new_name), silent = TRUE) try(fishy_badges(old_name = old_name), silent = TRUE) try(fishy_bookdown_config(old_name = old_name), silent = TRUE) } # good test cases: downlit, purrr, pkgbuild, zealot, glue, bench, # textshaping, scales fishy_github_actions <- function(new_name = "main") { if (!uses_github_actions()) { return(invisible(character())) } workflow_dir <- proj_path(".github", "workflows") workflows <- dir_ls(workflow_dir, regexp = "[.]ya?ml$") f <- function(pth, new_name) { x <- yaml::read_yaml(pth) x_unlisted <- unlist(x) locs <- grep("branches", re_match(names(x_unlisted), "[^//.]+$")$.match) branches <- x_unlisted[locs] length(branches) == 0 || new_name %in% branches } includes_branch_name <- map_lgl(workflows, f, new_name = new_name) paths <- proj_rel_path(workflows[!includes_branch_name]) if (length(paths) == 0) { return(invisible(character())) } paths <- sort(paths) ui_paths <- map_chr(paths, ui_path_impl) # TODO: the ui_paths used to be a nested bullet list # if that ever becomes possible/easier with cli, go back to that ui_bullets(c( "x" = "{cli::qty(length(ui_paths))}{?No/This/These} GitHub Action file{?s} {?/doesn't/don't} mention the new default branch {.val {new_name}}:", " " = "{.path {ui_paths}}" )) invisible(paths) } fishy_badges <- function(old_name = "master") { path <- find_readme() if (is.null(path)) { return(invisible(character())) } readme_lines <- read_utf8(path) badge_lines_range <- block_find( readme_lines, block_start = badge_start, block_end = badge_end ) if (length(badge_lines_range) != 2) { return(invisible(character())) } badge_lines <- readme_lines[badge_lines_range[1]:badge_lines_range[2]] if (!any(grepl(old_name, badge_lines))) { return(invisible(character())) } ui_bullets(c( "x" = "Some badges appear to refer to the old default branch {.val {old_name}}.", "_" = "Check and correct, if needed, in this file: {.path {pth(path)}}" )) invisible(path) } fishy_bookdown_config <- function(old_name = "master") { # https://github.com/dncamp/shift/blob/a12a3fb0cd30ae864525f7a9f1f907a05f15f9a3/_bookdown.yml # https://github.com/Jattan08/Wonderland/blob/b9e7ddc694871d1d13a2a02abe2d3b4a944c4653/_bookdown.yml # edit: https://github.com/dncamp/shift/edit/master/%s # view: https://github.com/dncamp/shift/blob/master/%s # history: https://github.com/YOUR GITHUB USERNAME/YOUR REPO NAME/commits/master/%s bookdown_config <- dir_ls( proj_get(), regexp = "_bookdown[.]ya?ml$", recurse = TRUE ) if (length(bookdown_config) == 0) { return(invisible(character())) } # I am (very weakly) worried about more than 1 match, hence the [[1]] bookdown_config <- purrr::discard(bookdown_config, ~ grepl("revdep", .x))[[1]] bookdown_config_lines <- read_utf8(bookdown_config) linky_lines <- grep("^(edit|view|history)", bookdown_config_lines, value = TRUE) if (!any(grepl(old_name, linky_lines))) { return(invisible(character())) } ui_bullets(c( "x" = "The bookdown configuration file may refer to the old default branch {.val {old_name}}.", "_" = "Check and correct, if needed, in this file: {.path {pth(bookdown_config)}}" )) invisible(path) } usethis/R/import-standalone-obj-type.R0000644000176200001440000002113414717524721017464 0ustar liggesusers# Standalone file: do not edit by hand # Source: 
https://github.com/r-lib/rlang/blob/HEAD/R/standalone-obj-type.R # Generated by: usethis::use_standalone("r-lib/rlang", "obj-type") # ---------------------------------------------------------------------- # # --- # repo: r-lib/rlang # file: standalone-obj-type.R # last-updated: 2024-02-14 # license: https://unlicense.org # imports: rlang (>= 1.1.0) # --- # # ## Changelog # # 2024-02-14: # - `obj_type_friendly()` now works for S7 objects. # # 2023-05-01: # - `obj_type_friendly()` now only displays the first class of S3 objects. # # 2023-03-30: # - `stop_input_type()` now handles `I()` input literally in `arg`. # # 2022-10-04: # - `obj_type_friendly(value = TRUE)` now shows numeric scalars # literally. # - `stop_friendly_type()` now takes `show_value`, passed to # `obj_type_friendly()` as the `value` argument. # # 2022-10-03: # - Added `allow_na` and `allow_null` arguments. # - `NULL` is now backticked. # - Better friendly type for infinities and `NaN`. # # 2022-09-16: # - Unprefixed usage of rlang functions with `rlang::` to # avoid onLoad issues when called from rlang (#1482). # # 2022-08-11: # - Prefixed usage of rlang functions with `rlang::`. # # 2022-06-22: # - `friendly_type_of()` is now `obj_type_friendly()`. # - Added `obj_type_oo()`. # # 2021-12-20: # - Added support for scalar values and empty vectors. # - Added `stop_input_type()` # # 2021-06-30: # - Added support for missing arguments. # # 2021-04-19: # - Added support for matrices and arrays (#141). # - Added documentation. # - Added changelog. # # nocov start #' Return English-friendly type #' @param x Any R object. #' @param value Whether to describe the value of `x`. Special values #' like `NA` or `""` are always described. #' @param length Whether to mention the length of vectors and lists. #' @return A string describing the type. Starts with an indefinite #' article, e.g. "an integer vector". #' @noRd obj_type_friendly <- function(x, value = TRUE) { if (is_missing(x)) { return("absent") } if (is.object(x)) { if (inherits(x, "quosure")) { type <- "quosure" } else { type <- class(x)[[1L]] } return(sprintf("a <%s> object", type)) } if (!is_vector(x)) { return(.rlang_as_friendly_type(typeof(x))) } n_dim <- length(dim(x)) if (!n_dim) { if (!is_list(x) && length(x) == 1) { if (is_na(x)) { return(switch( typeof(x), logical = "`NA`", integer = "an integer `NA`", double = if (is.nan(x)) { "`NaN`" } else { "a numeric `NA`" }, complex = "a complex `NA`", character = "a character `NA`", .rlang_stop_unexpected_typeof(x) )) } show_infinites <- function(x) { if (x > 0) { "`Inf`" } else { "`-Inf`" } } str_encode <- function(x, width = 30, ...) { if (nchar(x) > width) { x <- substr(x, 1, width - 3) x <- paste0(x, "...") } encodeString(x, ...) 
} if (value) { if (is.numeric(x) && is.infinite(x)) { return(show_infinites(x)) } if (is.numeric(x) || is.complex(x)) { number <- as.character(round(x, 2)) what <- if (is.complex(x)) "the complex number" else "the number" return(paste(what, number)) } return(switch( typeof(x), logical = if (x) "`TRUE`" else "`FALSE`", character = { what <- if (nzchar(x)) "the string" else "the empty string" paste(what, str_encode(x, quote = "\"")) }, raw = paste("the raw value", as.character(x)), .rlang_stop_unexpected_typeof(x) )) } return(switch( typeof(x), logical = "a logical value", integer = "an integer", double = if (is.infinite(x)) show_infinites(x) else "a number", complex = "a complex number", character = if (nzchar(x)) "a string" else "\"\"", raw = "a raw value", .rlang_stop_unexpected_typeof(x) )) } if (length(x) == 0) { return(switch( typeof(x), logical = "an empty logical vector", integer = "an empty integer vector", double = "an empty numeric vector", complex = "an empty complex vector", character = "an empty character vector", raw = "an empty raw vector", list = "an empty list", .rlang_stop_unexpected_typeof(x) )) } } vec_type_friendly(x) } vec_type_friendly <- function(x, length = FALSE) { if (!is_vector(x)) { abort("`x` must be a vector.") } type <- typeof(x) n_dim <- length(dim(x)) add_length <- function(type) { if (length && !n_dim) { paste0(type, sprintf(" of length %s", length(x))) } else { type } } if (type == "list") { if (n_dim < 2) { return(add_length("a list")) } else if (is.data.frame(x)) { return("a data frame") } else if (n_dim == 2) { return("a list matrix") } else { return("a list array") } } type <- switch( type, logical = "a logical %s", integer = "an integer %s", numeric = , double = "a double %s", complex = "a complex %s", character = "a character %s", raw = "a raw %s", type = paste0("a ", type, " %s") ) if (n_dim < 2) { kind <- "vector" } else if (n_dim == 2) { kind <- "matrix" } else { kind <- "array" } out <- sprintf(type, kind) if (n_dim >= 2) { out } else { add_length(out) } } .rlang_as_friendly_type <- function(type) { switch( type, list = "a list", NULL = "`NULL`", environment = "an environment", externalptr = "a pointer", weakref = "a weak reference", S4 = "an S4 object", name = , symbol = "a symbol", language = "a call", pairlist = "a pairlist node", expression = "an expression vector", char = "an internal string", promise = "an internal promise", ... = "an internal dots object", any = "an internal `any` object", bytecode = "an internal bytecode object", primitive = , builtin = , special = "a primitive function", closure = "a function", type ) } .rlang_stop_unexpected_typeof <- function(x, call = caller_env()) { abort( sprintf("Unexpected type <%s>.", typeof(x)), call = call ) } #' Return OO type #' @param x Any R object. #' @return One of `"bare"` (for non-OO objects), `"S3"`, `"S4"`, #' `"R6"`, or `"S7"`. #' @noRd obj_type_oo <- function(x) { if (!is.object(x)) { return("bare") } class <- inherits(x, c("R6", "S7_object"), which = TRUE) if (class[[1]]) { "R6" } else if (class[[2]]) { "S7" } else if (isS4(x)) { "S4" } else { "S3" } } #' @param x The object type which does not conform to `what`. Its #' `obj_type_friendly()` is taken and mentioned in the error message. #' @param what The friendly expected type as a string. Can be a #' character vector of expected types, in which case the error #' message mentions all of them in an "or" enumeration. #' @param show_value Passed to `value` argument of `obj_type_friendly()`. #' @param ... 
Arguments passed to [abort()]. #' @inheritParams args_error_context #' @noRd stop_input_type <- function(x, what, ..., allow_na = FALSE, allow_null = FALSE, show_value = TRUE, arg = caller_arg(x), call = caller_env()) { # From standalone-cli.R cli <- env_get_list( nms = c("format_arg", "format_code"), last = topenv(), default = function(x) sprintf("`%s`", x), inherit = TRUE ) if (allow_na) { what <- c(what, cli$format_code("NA")) } if (allow_null) { what <- c(what, cli$format_code("NULL")) } if (length(what)) { what <- oxford_comma(what) } if (inherits(arg, "AsIs")) { format_arg <- identity } else { format_arg <- cli$format_arg } message <- sprintf( "%s must be %s, not %s.", format_arg(arg), what, obj_type_friendly(x, value = show_value) ) abort(message, ..., call = call, arg = arg) } oxford_comma <- function(chr, sep = ", ", final = "or") { n <- length(chr) if (n < 2) { return(chr) } head <- chr[seq_len(n - 1)] last <- chr[n] head <- paste(head, collapse = sep) # Write a or b. But a, b, or c. if (n > 2) { paste0(head, sep, final, " ", last) } else { paste0(head, " ", final, " ", last) } } # nocov end usethis/R/roxygen.R0000644000176200001440000000677514717717764014021 0ustar liggesusers#' Use roxygen2 with markdown #' #' If you are already using roxygen2, but not with markdown, you'll need to use #' [roxygen2md](https://roxygen2md.r-lib.org) to convert existing Rd expressions #' to markdown. The conversion is not perfect, so make sure to check the #' results. #' #' @param overwrite Whether to overwrite an existing `Roxygen` field in #' `DESCRIPTION` with `"list(markdown = TRUE)"`. #' #' #' @export use_roxygen_md <- function(overwrite = FALSE) { check_installed("roxygen2") if (!uses_roxygen()) { roxy_ver <- as.character(utils::packageVersion("roxygen2")) proj_desc_field_update("Roxygen", "list(markdown = TRUE)", overwrite = FALSE) proj_desc_field_update("RoxygenNote", roxy_ver, overwrite = FALSE) ui_bullets(c("_" = "Run {.run devtools::document()}.")) return(invisible()) } already_setup <- uses_roxygen_md() if (isTRUE(already_setup)) { return(invisible()) } if (isFALSE(already_setup) || isTRUE(overwrite)) { proj_desc_field_update("Roxygen", "list(markdown = TRUE)", overwrite = TRUE) check_installed("roxygen2md") ui_bullets(c( "_" = "Run {.run roxygen2md::roxygen2md()} to convert existing Rd comments to markdown." )) if (!uses_git()) { ui_bullets(c( "!" = "Consider using Git for greater visibility into and control over the conversion process." )) } ui_bullets(c("_" = "Run {.run devtools::document()} when you're done.")) return(invisible()) } ui_abort(c( "DESCRIPTION already has a {.field Roxygen} field.", "Delete that field and try again or call {.code use_roxygen_md(overwrite = TRUE)}." 
)) invisible() } # FALSE: no Roxygen field # TRUE: matches regex targetting 'markdown = TRUE', with some whitespace slop # NA: everything else uses_roxygen_md <- function() { desc <- proj_desc() if (!desc$has_fields("Roxygen")) { return(FALSE) } roxygen <- desc$get_field("Roxygen", "") if (grepl("markdown\\s*=\\s*TRUE", roxygen)) { TRUE } else { NA } } uses_roxygen <- function() { proj_desc()$has_fields("RoxygenNote") } roxygen_ns_append <- function(tag) { block_append( tag, glue("#' {tag}"), path = proj_path(package_doc_path()), block_start = "## usethis namespace: start", block_end = "## usethis namespace: end", block_suffix = "NULL", sort = TRUE ) } roxygen_ns_show <- function() { block_show( path = proj_path(package_doc_path()), block_start = "## usethis namespace: start", block_end = "## usethis namespace: end" ) } roxygen_remind <- function() { ui_bullets(c( "_" = "Run {.run devtools::document()} to update {.path {pth('NAMESPACE')}}." )) TRUE } roxygen_update_ns <- function(load = is_interactive()) { ui_bullets(c("v" = "Writing {.path {pth('NAMESPACE')}}.")) utils::capture.output( suppressMessages(roxygen2::roxygenise(proj_get(), "namespace")) ) if (load) { ui_bullets(c("v" = "Loading {.pkg {project_name()}}.")) pkgload::load_all(path = proj_get(), quiet = TRUE) } TRUE } # Checkers ---------------------------------------------------------------- check_uses_roxygen <- function(whos_asking) { force(whos_asking) if (uses_roxygen()) { return(invisible()) } whos_asking_fn <- sub("()", "", whos_asking, fixed = TRUE) ui_abort(c( "Package {.pkg {project_name()}} does not use roxygen2.", "{.fun {whos_asking_fn}} can not work without it.", "You might just need to run {.run devtools::document()} once, then try again." )) } usethis/R/ui-legacy.R0000644000176200001440000002176714717524762014175 0ustar liggesusers#' Legacy functions related to user interface #' #' @description #' `r lifecycle::badge("superseded")` #' #' These functions are now superseded. External users of the `usethis::ui_*()` #' functions are encouraged to use the [cli package](https://cli.r-lib.org/) #' instead. The cli package did not have the required functionality when the #' `usethis::ui_*()` functions were created, but it has had that for a while #' now and it's the superior option. There is even a cli vignette about how to #' make this transition: `vignette("usethis-ui", package = "cli")`. #' #' usethis itself now uses cli internally for its UI, but these new functions #' are not exported and presumably never will be. There is a developer-focused #' article on the process of transitioning usethis's own UI to use cli: #' [Converting usethis's UI to use cli](https://usethis.r-lib.org/articles/ui-cli-conversion.html). #' @details #' #' The `ui_` functions can be broken down into four main categories: #' #' * block styles: `ui_line()`, `ui_done()`, `ui_todo()`, `ui_oops()`, #' `ui_info()`. #' * conditions: `ui_stop()`, `ui_warn()`. #' * questions: [ui_yeah()], [ui_nope()]. #' * inline styles: `ui_field()`, `ui_value()`, `ui_path()`, `ui_code()`, #' `ui_unset()`. #' #' The question functions [ui_yeah()] and [ui_nope()] have their own [help #' page][ui-questions]. #' #' All UI output (apart from `ui_yeah()`/`ui_nope()` prompts) can be silenced #' by setting `options(usethis.quiet = TRUE)`. Use [ui_silence()] to silence #' selected actions. #' #' @param x A character vector. #' #' For block styles, conditions, and questions, each element of the #' vector becomes a line, and the result is processed by [glue::glue()]. 
#' For inline styles, each element of the vector becomes an entry in a #' comma separated list. #' @param .envir Used to ensure that [glue::glue()] gets the correct #' environment. For expert use only. #' #' @return The block styles, conditions, and questions are called for their #' side-effect. The inline styles return a string. #' @keywords internal #' @name ui-legacy-functions #' @examples #' new_val <- "oxnard" #' ui_done("{ui_field('name')} set to {ui_value(new_val)}") #' ui_todo("Redocument with {ui_code('devtools::document()')}") #' #' ui_code_block(c( #' "Line 1", #' "Line 2", #' "Line 3" #' )) NULL # Block styles ------------------------------------------------------------ #' @rdname ui-legacy-functions #' @export ui_line <- function(x = character(), .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_inform(x) } #' @rdname ui-legacy-functions #' @export ui_todo <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_legacy_bullet(x, crayon::red(cli::symbol$bullet)) } #' @rdname ui-legacy-functions #' @export ui_done <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_legacy_bullet(x, crayon::green(cli::symbol$tick)) } #' @rdname ui-legacy-functions #' @export ui_oops <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_legacy_bullet(x, crayon::red(cli::symbol$cross)) } #' @rdname ui-legacy-functions #' @export ui_info <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_legacy_bullet(x, crayon::yellow(cli::symbol$info)) } #' @param copy If `TRUE`, the session is interactive, and the clipr package #' is installed, will copy the code block to the clipboard. #' @rdname ui-legacy-functions #' @export ui_code_block <- function(x, copy = rlang::is_interactive(), .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) block <- indent(x, " ") block <- crayon::silver(block) ui_inform(block) if (copy && clipr::clipr_available()) { x <- crayon::strip_style(x) clipr::write_clip(x) ui_inform(" [Copied to clipboard]") } } # Conditions -------------------------------------------------------------- #' @rdname ui-legacy-functions #' @export ui_stop <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) cnd <- structure( class = c("usethis_error", "error", "condition"), list(message = x) ) stop(cnd) } #' @rdname ui-legacy-functions #' @export ui_warn <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) warning(x, call. = FALSE, immediate. = TRUE) } # Questions --------------------------------------------------------------- #' User interface - Questions #' #' @description #' `r lifecycle::badge("superseded")` #' #' `ui_yeah()` and `ui_nope()` are technically superseded, but, unlike the rest #' of the legacy [`ui_*()`][ui-legacy-functions] functions, there's not yet a #' drop-in replacement available in the [cli package](https://cli.r-lib.org/). #' `ui_yeah()` and `ui_nope()` are no longer used internally in usethis. #' #' @inheritParams ui-legacy-functions #' @param yes A character vector of "yes" strings, which are randomly sampled to #' populate the menu. #' @param no A character vector of "no" strings, which are randomly sampled to #' populate the menu. #' @param n_yes An integer. The number of "yes" strings to include. #' @param n_no An integer. 
The number of "no" strings to include. #' @param shuffle A logical. Should the order of the menu options be randomly #' shuffled? #' #' @return A logical. `ui_yeah()` returns `TRUE` when the user selects a "yes" #' option and `FALSE` otherwise, i.e. when user selects a "no" option or #' refuses to make a selection (cancels). `ui_nope()` is the logical opposite #' of `ui_yeah()`. #' @name ui-questions #' @keywords internal #' @examples #' \dontrun{ #' ui_yeah("Do you like R?") #' ui_nope("Have you tried turning it off and on again?", n_yes = 1, n_no = 1) #' ui_yeah("Are you sure its plugged in?", yes = "Yes", no = "No", shuffle = FALSE) #' } NULL #' @rdname ui-questions #' @export ui_yeah <- function(x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) if (!is_interactive()) { ui_stop(c( "User input required, but session is not interactive.", "Query: {x}" )) } n_yes <- min(n_yes, length(yes)) n_no <- min(n_no, length(no)) qs <- c(sample(yes, n_yes), sample(no, n_no)) if (shuffle) { qs <- sample(qs) } # TODO: should this be ui_inform()? # later observation: probably not? you would not want these prompts to be # suppressed when `usethis.quiet = TRUE`, i.e. if the menu() appears, then # the introduction should also always appear rlang::inform(x) out <- utils::menu(qs) out != 0L && qs[[out]] %in% yes } #' @rdname ui-questions #' @export ui_nope <- function(x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame()) { # TODO(jennybc): is this correct in the case of no selection / cancelling? !ui_yeah( x = x, yes = yes, no = no, n_yes = n_yes, n_no = n_no, shuffle = shuffle, .envir = .envir ) } # Inline styles ----------------------------------------------------------- #' @rdname ui-legacy-functions #' @export ui_field <- function(x) { x <- crayon::green(x) x <- glue_collapse(x, sep = ", ") x } #' @rdname ui-legacy-functions #' @export ui_value <- function(x) { if (is.character(x)) { x <- encodeString(x, quote = "'") } x <- crayon::blue(x) x <- glue_collapse(x, sep = ", ") x } #' @rdname ui-legacy-functions #' @export #' @param base If specified, paths will be displayed relative to this path. ui_path <- function(x, base = NULL) { ui_value(ui_path_impl(x, base = base)) } #' @rdname ui-legacy-functions #' @export ui_code <- function(x) { x <- encodeString(x, quote = "`") x <- crayon::silver(x) x <- glue_collapse(x, sep = ", ") x } #' @rdname ui-legacy-functions #' @export ui_unset <- function(x = "unset") { check_string(x) x <- glue("<{x}>") x <- crayon::silver(x) x } # rlang::inform() wrappers ----------------------------------------------------- indent <- function(x, first = " ", indent = first) { x <- gsub("\n", paste0("\n", indent), x) paste0(first, x) } ui_legacy_bullet <- function(x, bullet = cli::symbol$bullet) { bullet <- paste0(bullet, " ") x <- indent(x, bullet, " ") ui_inform(x) } # All UI output must eventually go through ui_inform() so that it # can be quieted with 'usethis.quiet' when needed. ui_inform <- function(...) 
{ if (!is_quiet()) { inform(paste0(...)) } invisible() } usethis/R/release.R0000644000176200001440000003756614717524762013742 0ustar liggesusers#' Create a release checklist in a GitHub issue #' #' @description #' When preparing to release a package to CRAN there are quite a few steps that #' need to be performed, and some of the steps can take multiple hours. This #' function creates a checklist in a GitHub issue to: #' #' * Help you keep track of where you are in the process #' * Feel a sense of satisfaction as you progress towards final submission #' * Help watchers of your package stay informed. #' #' The checklist contains a generic set of steps that we've found to be helpful, #' based on the type of release ("patch", "minor", or "major"). You're #' encouraged to edit the issue to customize this list to meet your needs. #' #' ## Customization #' #' * If you want to consistently add extra bullets for every release, you can #' include your own custom bullets by providing an (unexported) #' `release_bullets()` function that returns a character vector. #' (For historical reasons, `release_questions()` is also supported). #' #' * If you want to check additional packages in the revdep check process, #' provide an (unexported) `release_extra_revdeps()` function that #' returns a character vector. This is currently only supported for #' Posit internal check tooling. #' #' @param version Optional version number for release. If unspecified, you can #' make an interactive choice. #' @export #' @examples #' \dontrun{ #' use_release_issue("2.0.0") #' } use_release_issue <- function(version = NULL) { check_is_package("use_release_issue()") tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_bullets(c( "!" = "It is very unusual to open a release issue on a repo you can't push to ({.val {tr$repo_spec}})." 
)) if (ui_nah("Do you really want to do this?")) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } version <- version %||% choose_version( "What should the release version be?", which = c("major", "minor", "patch") ) if (is.null(version)) { return(invisible(FALSE)) } on_cran <- !is.null(cran_version()) checklist <- release_checklist(version, on_cran, tr) gh <- gh_tr(tr) issue <- gh( "POST /repos/{owner}/{repo}/issues", title = glue("Release {project_name()} {version}"), body = paste0(checklist, "\n", collapse = "") ) Sys.sleep(1) view_url(issue$html_url) } release_checklist <- function(version, on_cran, target_repo = NULL) { type <- release_type(version) cran_results <- cran_results_url() has_news <- file_exists(proj_path("NEWS.md")) has_pkgdown <- uses_pkgdown() has_lifecycle <- proj_desc()$has_dep("lifecycle") has_readme <- file_exists(proj_path("README.Rmd")) has_github_links <- has_github_links(target_repo) is_posit_pkg <- is_posit_pkg() milestone_num <- gh_milestone_number(target_repo, version) c( if (!on_cran) c( "First release:", "", todo("`usethis::use_news_md()`", !has_news), todo("`usethis::use_cran_comments()`"), todo("Update (aspirational) install instructions in README"), todo("Proofread `Title:` and `Description:`"), todo("Check that all exported functions have `@return` and `@examples`"), todo("Check that `Authors@R:` includes a copyright holder (role 'cph')"), todo("Check [licensing of included files](https://r-pkgs.org/license.html#sec-code-you-bundle)"), todo("Review "), "" ), "Prepare for release:", "", todo("`git pull`"), todo("[Close v{version} milestone](../milestone/{milestone_num})", !is.na(milestone_num)), todo("Check [current CRAN check results]({cran_results})", on_cran), todo(" Check if any deprecation processes should be advanced, as described in \\ [Gradual deprecation](https://lifecycle.r-lib.org/articles/communicate.html#gradual-deprecation)", type != "patch" && has_lifecycle), todo("`usethis::use_news_md()`", on_cran && !has_news), todo("[Polish NEWS](https://style.tidyverse.org/news.html#news-release)", on_cran), todo("`usethis::use_github_links()`", !has_github_links), todo("`urlchecker::url_check()`"), todo("`devtools::build_readme()`", has_readme), todo("`devtools::check(remote = TRUE, manual = TRUE)`"), todo("`devtools::check_win_devel()`"), release_revdepcheck(on_cran, is_posit_pkg), todo("Update `cran-comments.md`", on_cran), todo("`git push`"), todo("Draft blog post", type != "patch"), todo("Slack link to draft blog in #open-source-comms", type != "patch" && is_posit_pkg), release_extra_bullets(), "", "Submit to CRAN:", "", todo("`usethis::use_version('{type}')`"), todo("`devtools::submit_cran()`"), todo("Approve email"), "", "Wait for CRAN...", "", todo("Accepted :tada:"), todo("Finish & publish blog post", type != "patch"), todo("Add link to blog post in pkgdown news menu", type != "patch"), todo("`usethis::use_github_release()`"), todo("`usethis::use_dev_version(push = TRUE)`"), todo("`usethis::use_news_md()`", !has_news), todo("Share on social media", type != "patch") ) } gh_milestone_number <- function(target_repo, version, state = "open") { gh <- gh_tr(target_repo) milestones <- tryCatch( gh("/repos/{owner}/{repo}/milestones", state = state), error = function(e) list() ) titles <- map_chr(milestones, "title") numbers <- map_int(milestones, "number") numbers[match(paste0("v", version), titles)] } # Get revdeps for current package get_revdeps <- function() { pkg <- proj_desc()$get_field("Package") tools::package_dependencies(pkg, 
which = "all", reverse = TRUE)[[pkg]] } release_revdepcheck <- function(on_cran = TRUE, is_posit_pkg = TRUE, env = NULL) { if (!on_cran || length(get_revdeps()) == 0) { return() } env <- env %||% safe_pkg_env() if (env_has(env, "release_extra_revdeps")) { extra <- env$release_extra_revdeps() stopifnot(is.character(extra)) } else { extra <- character() } if (is_posit_pkg) { if (length(extra) > 0) { extra_code <- paste0(deparse(extra), collapse = "") todo("`revdepcheck::cloud_check(extra_revdeps = {extra_code})`") } else { todo("`revdepcheck::cloud_check()`") } } else { todo("`revdepcheck::revdep_check(num_workers = 4)`") } } release_extra_bullets <- function(env = NULL) { env <- env %||% safe_pkg_env() if (env_has(env, "release_bullets")) { paste0("* [ ] ", env$release_bullets()) } else if (env_has(env, "release_questions")) { # For backwards compatibility with devtools paste0("* [ ] ", env$release_questions()) } else { character() } } safe_pkg_env <- function() { tryCatch( ns_env(project_name()), error = function(e) emptyenv() ) } release_type <- function(version) { x <- unclass(numeric_version(version))[[1]] n <- length(x) if (n >= 3 && x[[3]] != 0L) { "patch" } else if (n >= 2 && x[[2]] != 0L) { "minor" } else { "major" } } #' Publish a GitHub release #' #' @description #' Pushes the current branch (if safe) then publishes a GitHub release for the #' latest CRAN submission. #' #' If you use [devtools::submit_cran()] to submit to CRAN, information about the #' submitted state is captured in a `CRAN-SUBMISSION` file. #' `use_github_release()` uses this info to populate the GitHub release notes #' and, after success, deletes the file. In the absence of such a file, we #' assume that current state (SHA of `HEAD`, package version, NEWS) is the #' submitted state. #' #' @param publish If `TRUE`, publishes a release. If `FALSE`, creates a draft #' release. 
#' @export use_github_release <- function(publish = TRUE) { check_is_package("use_github_release()") tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork")) check_can_push(tr = tr, "to create a release") dat <- get_release_data(tr) release_name <- glue("{dat$Package} {dat$Version}") tag_name <- glue("v{dat$Version}") kv_line("Release name", release_name) kv_line("Tag name", tag_name) kv_line("SHA", dat$SHA) if (git_can_push()) { git_push() } check_github_has_SHA(SHA = dat$SHA, tr = tr) on_cran <- !is.null(cran_version()) news <- get_release_news(SHA = dat$SHA, tr = tr, on_cran = on_cran) gh <- gh_tr(tr) ui_bullets("Publishing {tag_name} release to GitHub") release <- gh( "POST /repos/{owner}/{repo}/releases", name = release_name, tag_name = tag_name, target_commitish = dat$SHA, body = news, draft = !publish ) ui_bullets("Release at {.url {release$html_url}}") if (!is.null(dat$file)) { ui_bullets("Deleting {.path {dat$file}}") file_delete(dat$file) } invisible() } get_release_data <- function(tr = target_repo(github_get = TRUE)) { cran_submission <- path_first_existing(proj_path(c("CRAN-SUBMISSION", "CRAN-RELEASE"))) if (is.null(cran_submission)) { ui_bullets(c("v" = "Using current HEAD commit for the release.")) challenge_non_default_branch() check_branch_pushed() return(list( Package = project_name(), Version = proj_version(), SHA = gert::git_info(repo = git_repo())$commit )) } if (path_file(cran_submission) == "CRAN-SUBMISSION") { # new style ---- # Version: 2.4.2 # Date: 2021-10-13 20:40:36 UTC # SHA: fbe18b5a22be8ebbb61fa7436e826ba8d7f485a9 out <- as.list(read.dcf(cran_submission)[1, ]) } if (path_file(cran_submission) == "CRAN-RELEASE") { gh <- gh_tr(tr) # old style ---- # This package was submitted to CRAN on 2021-10-13. # Once it is accepted, delete this file and tag the release (commit e10658f5). lines <- read_utf8(cran_submission) str_extract <- function(marker, pattern) { re_match(grep(marker, lines, value = TRUE), pattern)$.match } date <- str_extract("submitted.*on", "[0-9]{4}-[0-9]{2}-[0-9]{2}") sha <- str_extract("commit", "[[:xdigit:]]{7,40}") if (nchar(sha) != 40) { # the release endpoint requires the full sha sha <- gh("/repos/{owner}/{repo}/commits/{commit_sha}", commit_sha = sha)$sha } HEAD <- gert::git_info(repo = git_repo())$commit if (HEAD == sha) { version <- proj_version() } else { tf <- withr::local_tempfile() gh( "/repos/{owner}/{repo}/contents/{path}", path = "DESCRIPTION", ref = sha, .destfile = tf, .accept = "application/vnd.github.v3.raw" ) version <- desc::desc_get_version(tf) } out <- list( Version = version, Date = Sys.Date(), SHA = sha ) } out$Package <- project_name() out$file <- cran_submission ui_bullets(c( "{.path {pth(out$file)}} file found, from a submission on {as.Date(out$Date)}." )) out } check_github_has_SHA <- function(SHA = gert::git_info(repo = git_repo())$commit, tr = target_repo(github_get = TRUE)) { safe_gh <- purrr::safely(gh_tr(tr)) SHA_GET <- safe_gh( "/repos/{owner}/{repo}/git/commits/{commit_sha}", commit_sha = SHA ) if (is.null(SHA_GET$error)) { return() } if (inherits(SHA_GET$error, "http_error_404")) { ui_abort(c( "Can't find SHA {.val {substr(SHA, 1, 7)}} in {.val {tr$repo_spec}}.", "Do you need to push?" 
    ))
  }
  ui_abort("Internal error: Unexpected error when checking for SHA on GitHub.")
}

get_release_news <- function(SHA = gert::git_info(repo = git_repo())$commit,
                             tr = target_repo(github_get = TRUE),
                             on_cran = !is.null(cran_version())) {
  HEAD <- gert::git_info(repo = git_repo())$commit
  if (HEAD == SHA) {
    news_path <- proj_path("NEWS.md")
    news <- if (file_exists(news_path)) read_utf8(news_path) else NULL
  } else {
    news <- tryCatch(
      read_github_file(
        tr$repo_spec,
        path = "NEWS.md",
        ref = SHA,
        host = tr$api_url
      ),
      github_error = function(e) NULL
    )
  }
  if (is.null(news)) {
    ui_bullets(c(
      "x" = "Can't find {.path {pth('NEWS.md')}} in the released package source.",
      "i" = "{.pkg usethis} consults this file for release notes.",
      "i" = "Call {.run usethis::use_news_md()} to set this up for the future."
    ))
    if (on_cran) "-- no release notes --" else "Initial release"
  } else {
    news_latest(news)
  }
}

cran_version <- function(package = project_name(), available = NULL) {
  if (!curl::has_internet()) {
    return(NULL)
  }
  if (is.null(available)) {
    # Guard against CRAN mirror being unset
    available <- tryCatch(
      available.packages(repos = default_cran_mirror()),
      error = function(e) NULL
    )
    if (is.null(available)) {
      return(NULL)
    }
  }
  idx <- available[, "Package"] == package
  if (any(idx)) {
    as.package_version(available[package, "Version"])
  } else {
    NULL
  }
}

cran_results_url <- function(package = project_name()) {
  glue("https://cran.rstudio.org/web/checks/check_results_{package}.html")
}

news_latest <- function(lines) {
  headings <- which(grepl("^#\\s+", lines))

  if (length(headings) == 0) {
    ui_abort("No top-level headings found in {.path {pth('NEWS.md')}}.")
  } else if (length(headings) == 1) {
    news <- lines[seq2(headings + 1, length(lines))]
  } else {
    news <- lines[seq2(headings[[1]] + 1, headings[[2]] - 1)]
  }

  # Remove leading and trailing empty lines
  text <- which(news != "")
  if (length(text) == 0) {
    return("")
  }
  news <- news[text[[1]]:text[[length(text)]]]

  paste0(news, "\n", collapse = "")
}

is_posit_pkg <- function() {
  is_posit_cph_or_fnd() || is_in_posit_org()
}

is_posit_cph_or_fnd <- function() {
  if (!is_package()) {
    return(FALSE)
  }
  roles <- get_posit_roles()
  "cph" %in% roles || "fnd" %in% roles
}

is_posit_person_canonical <- function() {
  if (!is_package()) {
    return(FALSE)
  }
  roles <- get_posit_roles()
  length(roles) > 0 &&
    "fnd" %in% roles &&
    "cph" %in% roles &&
    attr(roles, "appears_in", exact = TRUE) == "given" &&
    attr(roles, "appears_as", exact = TRUE) == "Posit Software, PBC"
}

get_posit_roles <- function() {
  if (!is_package()) {
    return()
  }
  desc <- proj_desc()
  fnd <- unclass(desc$get_author("fnd"))
  cph <- unclass(desc$get_author("cph"))

  detect_posit <- function(x) {
    any(grepl("rstudio|posit", tolower(x[c("given", "family")])))
  }
  fnd <- purrr::keep(fnd, detect_posit)
  cph <- purrr::keep(cph, detect_posit)

  if (length(fnd) < 1 && length(cph) < 1) {
    return(character())
  }
  person <- c(fnd, cph)[[1]]

  out <- person$role
  if (!is.null(person$given) && nzchar(person$given)) {
    attr(out, "appears_as") <- person$given
    attr(out, "appears_in") <- "given"
  } else {
    attr(out, "appears_as") <- person$family
    attr(out, "appears_in") <- "family"
  }
  out
}

is_in_posit_org <- function() {
  if (!is_package()) {
    return(FALSE)
  }
  desc <- proj_desc()
  urls <- desc$get_urls()
  dat <- parse_github_remotes(urls)
  dat <- dat[dat$host == "github.com", ]
  purrr::some(dat$repo_owner, ~ .x %in% posit_orgs())
}

posit_orgs <- function() {
  c(
    "tidyverse",
    "r-lib",
    "tidymodels",
    "rstudio"
  )
}

todo <- function(x, cond = TRUE) {
  x <- glue(x, .envir = parent.frame())
  if (cond) {
paste0("* [ ] ", x) } } author_has_rstudio_email <- function() { if (!is_package()) { return() } desc <- proj_desc() any(grepl("@rstudio[.]com", tolower(desc$get_authors()))) } pkg_minimum_r_version <- function() { deps <- proj_desc()$get_deps() r_dep <- deps[deps$package == "R" & deps$type == "Depends", "version"] if (length(r_dep) > 0) { numeric_version(gsub("[^0-9.]", "", r_dep)) } else { NA_character_ } } # Borrowed from pak, but modified also retain user's non-cran repos: # https://github.com/r-lib/pak/blob/168ab5d58fc244e5084c2800c87b8a574d66c3ba/R/default-cran-mirror.R default_cran_mirror <- function() { repos <- getOption("repos") cran <- repos["CRAN"] if (is.null(cran) || is.na(cran) || cran == "@CRAN@") { repos["CRAN"] <- "https://cloud.r-project.org" } repos } usethis/R/helpers.R0000644000176200001440000000650414717524721013743 0ustar liggesusersuse_dependency <- function(package, type, min_version = NULL) { check_name(package) check_name(type) if (package != "R") { check_installed(package) } if (package == "R" && tolower(type) != "depends") { ui_abort('Set {.code type = "Depends"} when specifying an R version.') } else if (package == "R" && is.null(min_version)) { ui_abort('Specify {.arg min_version} when {.code package = "R"}.') } if (isTRUE(min_version) && package == "R") { min_version <- r_version() } else if (isTRUE(min_version)) { min_version <- utils::packageVersion(package) } version <- if (is.null(min_version)) "*" else glue(">= {min_version}") types <- c("Depends", "Imports", "Suggests", "Enhances", "LinkingTo") names(types) <- tolower(types) type <- types[[match.arg(tolower(type), names(types))]] desc <- proj_desc() deps <- desc$get_deps() deps <- deps[deps$package == package, ] new_linking_to <- type == "LinkingTo" && !"LinkingTo" %in% deps$type new_non_linking_to <- type != "LinkingTo" && identical(deps$type, "LinkingTo") changed <- FALSE # One of: # * No existing dependency on this package # * Adding existing non-LinkingTo dependency to LinkingTo # * First use of a LinkingTo package as a non-LinkingTo dependency # In all cases, we can can simply make the change. if (nrow(deps) == 0 || new_linking_to || new_non_linking_to) { ui_bullets(c( "v" = "Adding {.pkg {package}} to {.field {type}} field in DESCRIPTION." )) desc$set_dep(package, type, version = version) desc$write() changed <- TRUE return(invisible(changed)) } if (type == "LinkingTo") { deps <- deps[deps$type == "LinkingTo", ] } else { deps <- deps[deps$type != "LinkingTo", ] } existing_type <- deps$type existing_version <- deps$version delta <- sign(match(existing_type, types) - match(type, types)) if (delta < 0) { # don't downgrade ui_bullets(c( "!" = "Package {.pkg {package}} is already listed in {.field {existing_type}} in DESCRIPTION; no change made." )) } else if (delta == 0 && version_spec(version) != version_spec(existing_version)) { if (version_spec(version) > version_spec(existing_version)) { direction <- "Increasing" } else { direction <- "Decreasing" } ui_bullets(c( "v" = "{direction} {.pkg {package}} version to {.val {version}} in DESCRIPTION." )) desc$set_dep(package, type, version = version) desc$write() changed <- TRUE } else if (delta > 0) { # moving from, e.g., Suggests to Imports ui_bullets(c( "v" = "Moving {.pkg {package}} from {.field {existing_type}} to {.field {type}} field in DESCRIPTION." 
)) desc$del_dep(package, existing_type) desc$set_dep(package, type, version = version) desc$write() changed <- TRUE } invisible(changed) } r_version <- function() { version <- getRversion() glue("{version$major}.{version$minor}") } version_spec <- function(x) { if (x == "*") x <- "0" x <- gsub("(<=|<|>=|>|==)\\s*", "", x) numeric_version(x) } view_url <- function(..., open = is_interactive()) { url <- paste(..., sep = "/") if (open) { ui_bullets(c("v" = "Opening URL {.url {url}}.")) utils::browseURL(url) } else { ui_bullets(c("_" = "Open URL {.url {url}}.")) } invisible(url) } usethis/R/line-ending.R0000644000176200001440000000212314651000165014447 0ustar liggesusersproj_line_ending <- function() { # First look in .Rproj file proj_path <- proj_path(paste0(project_name(), ".Rproj")) if (file_exists(proj_path)) { config <- read_utf8(proj_path) if (any(grepl("^LineEndingConversion: Posix", config))) { return("\n") } else if (any(grepl("^LineEndingConversion: Windows", config))) { return("\r\n") } } # Then try DESCRIPTION desc_path <- proj_path("DESCRIPTION") if (file_exists(desc_path)) { return(detect_line_ending(desc_path)) } # Then try any .R file r_path <- proj_path("R") if (dir_exists(r_path)) { r_files <- dir_ls(r_path, regexp = "[.][rR]$") if (length(r_files) > 0) { return(detect_line_ending(r_files[[1]])) } } # Then give up - this is used (for example), when writing the # first file into the package platform_line_ending() } platform_line_ending <- function() { if (.Platform$OS.type == "windows") "\r\n" else "\n" } detect_line_ending <- function(path) { samp <- suppressWarnings(readChar(path, nchars = 500)) if (isTRUE(grepl("\r\n", samp))) "\r\n" else "\n" } usethis/R/use_standalone.R0000644000176200001440000001661214717524762015313 0ustar liggesusers#' Use a standalone file from another repo #' #' @description #' A "standalone" file implements a minimum set of functionality in such a way #' that it can be copied into another package. `use_standalone()` makes it easy #' to get such a file into your own repo. #' #' It always overwrites an existing standalone file of the same name, making #' it easy to update previously imported code. #' #' @section Supported fields: #' #' A standalone file has YAML frontmatter that provides additional information, #' such as where the file originates from and when it was last updated. Here is #' an example: #' #' ``` #' --- #' repo: r-lib/rlang #' file: standalone-types-check.R #' last-updated: 2023-03-07 #' license: https://unlicense.org #' dependencies: standalone-obj-type.R #' imports: rlang (>= 1.1.0) #' --- #' ``` #' #' Two of these fields are consulted by `use_standalone()`: #' #' - `dependencies`: A file or a list of files in the same repo that #' the standalone file depends on. These files are retrieved #' automatically by `use_standalone()`. #' #' - `imports`: A package or list of packages that the standalone file #' depends on. A minimal version may be specified in parentheses, #' e.g. `rlang (>= 1.0.0)`. These dependencies are passed to #' [use_package()] to ensure they are included in the `Imports:` #' field of the `DESCRIPTION` file. #' #' Note that lists are specified with standard YAML syntax, using #' square brackets, for example: `imports: [rlang (>= 1.0.0), purrr]`. #' #' @inheritParams create_from_github #' @inheritParams use_github_file #' @param file Name of standalone file. The `standalone-` prefix and file #' extension are optional. If omitted, will allow you to choose from the #' standalone files offered by that repo. 
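# --- Editor's aside (illustration only; not part of usethis) -----------------
# A small sketch of how version_spec(), defined above, normalizes version
# requirements so use_dependency() can compare them and decide whether a new
# minimum version is an upgrade or a downgrade. Guarded with `if (FALSE)` so it
# never runs as package code.
if (FALSE) {
  version_spec(">= 1.2.0") > version_spec(">= 1.1.0") # TRUE: treated as an upgrade
  version_spec("*") # "*" (any version) is treated as numeric_version("0")
}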
#' @export #' @examples #' \dontrun{ #' use_standalone("r-lib/rlang", file = "types-check") #' use_standalone("r-lib/rlang", file = "types-check", ref = "standalone-dep") #' } use_standalone <- function(repo_spec, file = NULL, ref = NULL, host = NULL) { check_is_project() maybe_name(file) maybe_name(host) maybe_name(ref) parsed_repo_spec <- parse_repo_url(repo_spec) if (!is.null(parsed_repo_spec$host)) { repo_spec <- parsed_repo_spec$repo_spec host <- parsed_repo_spec$host } if (is.null(file)) { file <- standalone_choose(repo_spec, ref = ref, host = host) } else { file <- as_standalone_file(file) } src_path <- path("R", file) dest_path <- path("R", as_standalone_dest_file(file)) lines <- read_github_file(repo_spec, path = src_path, ref = ref, host = host) lines <- c(standalone_header(repo_spec, src_path, ref, host), lines) write_over(proj_path(dest_path), lines, overwrite = TRUE) dependencies <- standalone_dependencies(lines, path) for (dependency in dependencies$deps) { use_standalone(repo_spec, dependency, ref = ref, host = host) } imports <- dependencies$imports for (i in seq_len(nrow(imports))) { import <- imports[i, , drop = FALSE] if (is.na(import$ver)) { ver <- NULL } else { ver <- import$ver } ui_silence( use_package(import$pkg, min_version = ver) ) } invisible() } standalone_choose <- function(repo_spec, ref = NULL, host = NULL, error_call = caller_env()) { json <- gh::gh( "/repos/{repo_spec}/contents/{path}", repo_spec = repo_spec, ref = ref, .api_url = host, path = "R/" ) names <- map_chr(json, "name") names <- names[grepl("^standalone-", names)] choices <- gsub("^standalone-|.[Rr]$", "", names) if (length(choices) == 0) { cli::cli_abort( "No standalone files found in {repo_spec}.", call = error_call ) } if (!is_interactive()) { cli::cli_abort( c( "`file` is absent, but must be supplied.", i = "Possible options are {.or {choices}}." ), call = error_call ) } choice <- utils::menu( choices = choices, title = "Which standalone file do you want to use (0 to exit)?" 
) if (choice == 0) { cli::cli_abort("Selection cancelled", call = error_call) } names[[choice]] } as_standalone_file <- function(file) { if (path_ext(file) == "") { file <- unclass(path_ext_set(file, "R")) } if (!grepl("standalone-", file)) { file <- paste0("standalone-", file) } file } as_standalone_dest_file <- function(file) { gsub("standalone-", "import-standalone-", file) } standalone_header <- function(repo_spec, path, ref = NULL, host = NULL) { ref_string <- ref %||% "HEAD" host_string <- host %||% "https://github.com" source_comment <- glue("# Source: {host_string}/{repo_spec}/blob/{ref_string}/{path}") path_string <- path_ext_remove(sub("^standalone-", "", path_file(path))) ref_string <- if (is.null(ref)) "" else glue(', ref = "{ref}"') host_string <- if (is.null(host) || host == "https://github.com") "" else glue(', host = "{host}"') code_hint <- glue('usethis::use_standalone("{repo_spec}", "{path_string}"{ref_string}{host_string})') generated_comment <- glue('# Generated by: {code_hint}') c( "# Standalone file: do not edit by hand", source_comment, generated_comment, paste0("# ", strrep("-", 72 - 2)), "#" ) } standalone_dependencies <- function(lines, path, error_call = caller_env()) { dividers <- which(lines == "# ---") if (length(dividers) != 2) { cli::cli_abort( "Can't find yaml metadata in {.path {path}}.", call = error_call ) } header <- lines[dividers[[1]]:dividers[[2]]] header <- gsub("^# ", "", header) temp <- withr::local_tempfile(lines = header) yaml <- rmarkdown::yaml_front_matter(temp) as_chr_field <- function(field) { if (!is.null(field) && !is.character(field)) { cli::cli_abort( "Invalid dependencies specification in {.path {path}}.", call = error_call ) } field %||% character() } deps <- as_chr_field(yaml$dependencies) imports <- as_chr_field(yaml$imports) imports <- as_version_info(imports, error_call = error_call) if (any(stats::na.omit(imports$cmp) != ">=")) { cli::cli_abort( "Version specification must use {.code >=}.", call = error_call ) } list(deps = deps, imports = imports) } as_version_info <- function(fields, error_call = caller_env()) { if (!length(fields)) { return(version_info_df()) } if (any(grepl(",", fields))) { msg <- c( "Version field can't contain comma.", "i" = "Do you need to wrap in a list?" ) cli::cli_abort(msg, call = error_call) } info <- lapply(fields, as_version_info_row, error_call = error_call) inject(rbind(!!!info)) } as_version_info_row <- function(field, error_call = caller_env()) { version_regex <- "(.*) \\((.*)\\)$" has_ver <- grepl(version_regex, field) if (!has_ver) { return(version_info_df(field, NA, NA)) } pkg <- sub(version_regex, "\\1", field) ver <- sub(version_regex, "\\2", field) ver <- strsplit(ver, " ")[[1]] if (!is_character(ver, n = 2) || anyNA(ver) || !all(nzchar(ver))) { cli::cli_abort( c( "Can't parse version `{field}` in `imports:` field.", "i" = "Example of expected version format: `rlang (>= 1.0.0)`." 
), call = error_call ) } version_info_df(pkg, ver[[1]], ver[[2]]) } version_info_df <- function(pkg = chr(), cmp = chr(), ver = chr()) { df <- data.frame( pkg = as.character(pkg), cmp = as.character(cmp), ver = as.character(ver) ) structure(df, class = c("tbl", "data.frame")) } usethis/R/proj-desc.R0000644000176200001440000000271414720134716014161 0ustar liggesusersproj_desc <- function(path = proj_get()) { desc::desc(file = path) } proj_version <- function() { proj_desc()$get_field("Version") } proj_deps <- function() { proj_desc()$get_deps() } proj_desc_create <- function(name, fields = list(), roxygen = TRUE) { fields <- use_description_defaults(name, roxygen = roxygen, fields = fields) # https://github.com/r-lib/desc/issues/132 desc <- desc::desc(text = glue("{names(fields)}: {fields}")) tidy_desc(desc) tf <- withr::local_tempfile() desc$write(file = tf) write_over(proj_path("DESCRIPTION"), read_utf8(tf)) # explicit check of "usethis.quiet" since I'm not doing the printing if (!is_quiet()) { desc$print() } } # Here overwrite means "update the field if there is already a value in it, # including appending". proj_desc_field_update <- function(key, value, overwrite = TRUE, append = FALSE) { check_string(key) check_character(value) check_bool(overwrite) desc <- proj_desc() old <- desc$get_list(key, default = "") if (all(value %in% old)) { return(invisible()) } if (!overwrite && length(old) > 0 && any(old != "")) { ui_abort(" {.field {key}} has a different value in DESCRIPTION. Use {.code overwrite = TRUE} to overwrite.") } ui_bullets(c("v" = "Adding {.val {value}} to {.field {key}}.")) if (append) { value <- union(old, value) } # https://github.com/r-lib/desc/issues/117 desc$set_list(key, value) desc$write() invisible() } usethis/R/vscode.R0000644000176200001440000000266514651000165013554 0ustar liggesusers# unexported function we are experimenting with use_vscode_debug <- function(open = rlang::is_interactive()) { usethis::use_directory(".vscode", ignore = TRUE) deps <- proj_deps() lt_pkgs <- deps$package[deps$type == "LinkingTo"] possibly_path_package <- purrr::possibly(path_package, otherwise = NA) lt_paths <- map_chr(lt_pkgs, ~ possibly_path_package(.x, "include")) lt_paths <- purrr::discard(lt_paths, is.na) # this is a bit fiddly, but it produces the desired JSON when lt_paths has # length 0 or > 0 # I should probably come back and use jsonlite here instead of use_template() lt_paths <- encodeString(lt_paths, quote = '"') lt_paths <- glue(" {lt_paths},") lt_paths <- glue_collapse(lt_paths, sep = "\n") if (length(lt_paths) > 0) { lt_paths <- paste0("\n", lt_paths) } use_template( "vscode-c_cpp_properties.json", save_as = path(".vscode", "c_cpp_properties.json"), data = list(linking_to_includes = lt_paths), ignore = FALSE, # the .vscode directory is already ignored open = open ) use_template( "vscode-launch.json", save_as = path(".vscode", "launch.json"), ignore = FALSE, # the .vscode directory is already ignored open = open ) usethis::use_directory("debug", ignore = TRUE) use_template( "vscode-debug.R", save_as = path("debug", "debug.R"), ignore = FALSE, # the debug directory is already ignored open = open ) invisible(TRUE) } usethis/R/cpp11.R0000644000176200001440000000215614717524721013224 0ustar liggesusers#' Use C++ via the cpp11 package #' #' Adds infrastructure needed to use the [cpp11](https://cpp11.r-lib.org) #' package, a header-only R package that helps R package developers handle R #' objects with C++ code: #' * Creates `src/` #' * Adds cpp11 to `DESCRIPTION` #' * Creates 
`src/code.cpp`, an initial placeholder `.cpp` file #' #' @export use_cpp11 <- function() { check_is_package("use_cpp11()") check_installed("cpp11") check_uses_roxygen("use_cpp11()") check_has_package_doc("use_cpp11()") use_src() use_dependency("cpp11", "LinkingTo") use_template( "code-cpp11.cpp", path("src", "code.cpp"), open = is_interactive() ) check_cpp_register_deps() invisible() } get_cpp_register_deps <- function() { desc <- desc::desc(package = "cpp11") desc$get_list("Config/Needs/cpp11/cpp_register")[[1]] } check_cpp_register_deps <- function() { cpp_register_deps <- get_cpp_register_deps() installed <- map_lgl(cpp_register_deps, is_installed) if (!all(installed)) { ui_bullets(c( "_" = "Now install {.pkg {cpp_register_deps[!installed]}} to use {.pkg cpp11}." )) } } usethis/R/rcpp.R0000644000176200001440000000554014651000165013230 0ustar liggesusers#' Use C, C++, RcppArmadillo, or RcppEigen #' #' Adds infrastructure commonly needed when using compiled code: #' * Creates `src/` #' * Adds required packages to `DESCRIPTION` #' * May create an initial placeholder `.c` or `.cpp` file #' * Creates `Makevars` and `Makevars.win` files (`use_rcpp_armadillo()` only) #' #' @inheritParams use_r #' @export use_rcpp <- function(name = NULL) { check_is_package("use_rcpp()") check_uses_roxygen("use_rcpp()") use_dependency("Rcpp", "LinkingTo") use_dependency("Rcpp", "Imports") roxygen_ns_append("@importFrom Rcpp sourceCpp") && roxygen_remind() use_src() path <- path("src", compute_name(name, "cpp")) use_template("code.cpp", path) edit_file(proj_path(path)) invisible() } #' @rdname use_rcpp #' @export use_rcpp_armadillo <- function(name = NULL) { use_rcpp(name) use_dependency("RcppArmadillo", "LinkingTo") makevars_settings <- list( "CXX_STD" = "CXX11", "PKG_CXXFLAGS" = "$(SHLIB_OPENMP_CXXFLAGS)", "PKG_LIBS" = "$(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS)" ) use_makevars(makevars_settings) invisible() } #' @rdname use_rcpp #' @export use_rcpp_eigen <- function(name = NULL) { use_rcpp(name) use_dependency("RcppEigen", "LinkingTo") roxygen_ns_append("@import RcppEigen") && roxygen_remind() invisible() } #' @rdname use_rcpp #' @export use_c <- function(name = NULL) { check_is_package("use_c()") check_uses_roxygen("use_c()") use_src() path <- path("src", compute_name(name, ext = "c")) use_template("code.c", path) edit_file(proj_path(path)) invisible(TRUE) } use_src <- function() { use_directory("src") use_git_ignore(c("*.o", "*.so", "*.dll"), "src") roxygen_ns_append(glue("@useDynLib {project_name()}, .registration = TRUE")) && roxygen_remind() invisible() } use_makevars <- function(settings = NULL) { use_directory("src") settings_list <- settings %||% list() check_is_named_list(settings_list) makevars_entries <- vapply(settings_list, glue_collapse, character(1)) makevars_content <- glue("{names(makevars_entries)} = {makevars_entries}") makevars_path <- proj_path("src", "Makevars") makevars_win_path <- proj_path("src", "Makevars.win") if (!file_exists(makevars_path) && !file_exists(makevars_win_path)) { write_utf8(makevars_path, makevars_content) file_copy(makevars_path, makevars_win_path) ui_bullets(c( "v" = "Created {.path {pth(makevars_path)}} and {.path {pth(makevars_win_path)}} with requested compilation settings." 
)) } else { ui_bullets(c( "_" = "Ensure the following Makevars compilation settings are set for both {.path {pth(makevars_path)}} and {.path {pth(makevars_win_path)}}:" )) ui_code_snippet( makevars_content, language = "" ) edit_file(makevars_path) edit_file(makevars_win_path) } } usethis/R/upkeep.R0000644000176200001440000002512214717524762013574 0ustar liggesusers#' Create an upkeep checklist in a GitHub issue #' #' @description #' This opens an issue in your package repository with a checklist of tasks for #' regular maintenance of your package. This is a fairly opinionated list of #' tasks but we believe taking care of them will generally make your package #' better, easier to maintain, and more enjoyable for your users. Some of the #' tasks are meant to be performed only once (and once completed shouldn't show #' up in subsequent lists), and some should be reviewed periodically. The #' tidyverse team uses a similar function [use_tidy_upkeep_issue()] for our #' annual package Spring Cleaning. #' #' @param year Year you are performing the upkeep, used in the issue title. #' Defaults to current year #' #' @export #' @examples #' \dontrun{ #' use_upkeep_issue() #' } use_upkeep_issue <- function(year = NULL) { make_upkeep_issue(year = year, tidy = FALSE) } make_upkeep_issue <- function(year, last_upkeep, tidy) { who <- if (tidy) "use_tidy_upkeep_issue()" else "use_upkeep_issue()" check_is_package(who) tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_bullets(c( "!" = "It is very unusual to open an upkeep issue on a repo you can't push to ({.val {tr$repo_spec}})." )) if (ui_nah("Do you really want to do this?")) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } gh <- gh_tr(tr) if (tidy) { checklist <- tidy_upkeep_checklist(last_upkeep, repo_spec = tr$repo_spec) } else { checklist <- upkeep_checklist(tr) } title_year <- year %||% format(Sys.Date(), "%Y") issue <- gh( "POST /repos/{owner}/{repo}/issues", title = glue("Upkeep for {project_name()} ({title_year})"), body = paste0(checklist, "\n", collapse = ""), labels = if (tidy) list("upkeep") ) Sys.sleep(1) view_url(issue$html_url) } upkeep_checklist <- function(target_repo = NULL) { has_github_links <- has_github_links(target_repo) bullets <- c( todo("`usethis::use_readme_rmd()`", !file_exists(proj_path("README.Rmd"))), todo("`usethis::use_roxygen_md()`", !is_true(uses_roxygen_md())), todo("`usethis::use_github_links()`", !has_github_links), todo("`usethis::use_pkgdown_github_pages()`", !uses_pkgdown()), todo("`usethis::use_tidy_description()`"), todo( " `usethis::use_package_doc()` Consider letting usethis manage your `@importFrom` directives here. \\ `usethis::use_import_from()` is handy for this.", !has_package_doc() ), todo( " `usethis::use_testthat()`. \\ Learn more about testing at ", !uses_testthat() ), todo( " `usethis::use_testthat(3)` and upgrade to 3e, \\ [testthat 3e vignette](https://testthat.r-lib.org/articles/third-edition.html)", uses_old_testthat_edition(current = 3) ), todo(" Align the names of `R/` files and `test/` files for workflow happiness. \\ The docs for `usethis::use_r()` include a helpful script. \\ `usethis::rename_files()` may be be useful."), todo( "Consider changing default branch from `master` to `main`", git_default_branch() == "master" ), todo("`usethis::use_code_of_conduct()`", !has_coc()), todo( "Remove description of test environments from `cran-comments.md`. 
See `usethis::use_cran_comments()`.", has_old_cran_comments() ), todo(" Add alt-text to pictures, plots, etc; see \\ for examples"), "", "Set up or update GitHub Actions. \\ Updating workflows to the latest version will often fix troublesome actions:", todo("`usethis::use_github_action('check-standard')`"), todo("`usethis::use_github_action('pkgdown')`", uses_pkgdown()), todo("`usethis::use_github_action('test-coverage')`", uses_testthat()) ) c(bullets, upkeep_extra_bullets(), checklist_footer(tidy = FALSE)) } # tidyverse upkeep issue ------------------------------------------------------- #' @export #' @rdname tidyverse #' @param last_upkeep Year of last upkeep. By default, the #' `Config/usethis/last-upkeep` field in `DESCRIPTION` is consulted for this, if #' it's defined. If there's no information on the last upkeep, the issue will #' contain the full checklist. use_tidy_upkeep_issue <- function(last_upkeep = last_upkeep_year()) { make_upkeep_issue(year = NULL, last_upkeep = last_upkeep, tidy = TRUE) record_upkeep_date(Sys.Date()) } # for mocking Sys.Date <- NULL tidy_upkeep_checklist <- function(last_upkeep = last_upkeep_year(), repo_spec = "OWNER/REPO") { desc <- proj_desc() posit_pkg <- is_posit_pkg() posit_person_ok <- is_posit_person_canonical() bullets <- c( "### To begin", "", todo('`pr_init("upkeep-{format(Sys.Date(), "%Y-%m")}")`'), "" ) if (last_upkeep <= 2000) { bullets <- c( bullets, "### Pre-history", "", todo("`usethis::use_readme_rmd()`"), todo("`usethis::use_roxygen_md()`"), todo("`usethis::use_github_links()`"), todo("`usethis::use_pkgdown_github_pages()`"), todo("`usethis::use_tidy_github_labels()`"), todo("`usethis::use_tidy_style()`"), todo("`urlchecker::url_check()`"), "" ) } if (last_upkeep <= 2020) { bullets <- c( bullets, "### 2020", "", todo("`usethis::use_package_doc()`"), todo("`usethis::use_testthat(3)`"), todo("Align the names of `R/` files and `test/` files"), "" ) } if (last_upkeep <= 2021) { bullets <- c( bullets, "### 2021", "", todo("Remove check environments section from `cran-comments.md`"), todo("Use lifecycle instead of artisanal deprecation messages"), "" ) } if (last_upkeep <= 2022) { bullets <- c( bullets, "### 2022", "", todo("Handle and close any still-open `master` --> `main` issues"), todo('`usethis:::use_codecov_badge("{repo_spec}")`'), todo("Update pkgdown site using instructions at "), todo("Update lifecycle badges with more accessible SVGs: `usethis::use_lifecycle()`"), "" ) } if (last_upkeep <= 2023) { bullets <- c( bullets, "### 2023", "", todo( " Update email addresses *@rstudio.com -> *@posit.co", author_has_rstudio_email() ), todo( ' Update copyright holder in DESCRIPTION: \\ `person("Posit Software, PBC", role = c("cph", "fnd"))`', posit_pkg && !posit_person_ok ), todo( " Run `devtools::document()` to re-generate package-level help topic \\ with DESCRIPTION changes", author_has_rstudio_email() || (posit_pkg && !posit_person_ok) ), todo("`usethis::use_tidy_logo(); pkgdown::build_favicons(overwrite = TRUE)`"), todo("`usethis::use_tidy_coc()`"), todo( "Modernize citation files; see updated `use_citation()`", has_citation_file() ), todo('Use `pak::pak("{repo_spec}")` in README'), todo(" Consider running `usethis::use_tidy_dependencies()` and/or \\ replace compat files with `use_standalone()`"), todo("Use cli errors or [file an issue](new) if you don\'t have time to do it now"), todo(' `usethis::use_standalone("r-lib/rlang", "types-check")` \\ instead of home grown argument checkers; or [file an issue](new) if you don\'t have time to do 
it now'), todo( " Change files ending in `.r` to `.R` in `R/` and/or `tests/testthat/`", lowercase_r() ), todo(" Add alt-text to pictures, plots, etc; see \\ https://posit.co/blog/knitr-fig-alt/ for examples" ), "" ) } minimum_r_version <- pkg_minimum_r_version() bullets <- c( bullets, "### To finish", "", todo("`usethis::use_mit_license()`", grepl("MIT", desc$get_field("License"))), todo( '`usethis::use_package("R", "Depends", "{tidy_minimum_r_version()}")`', is.na(minimum_r_version) || tidy_minimum_r_version() > minimum_r_version ), todo("`usethis::use_tidy_description()`"), todo("`usethis::use_tidy_github_actions()`"), todo("`devtools::build_readme()`"), todo("[Re-publish released site](https://pkgdown.r-lib.org/dev/articles/how-to-update-released-site.html) if needed"), "" ) c(bullets, checklist_footer(tidy = TRUE)) } # upkeep helpers ---------------------------------------------------------- # https://www.tidyverse.org/blog/2019/04/r-version-support/ tidy_minimum_r_version <- function() { con <- curl::curl("https://api.r-hub.io/rversions/r-oldrel/4") withr::defer(close(con)) # I do not want a failure here to make use_tidy_upkeep_issue() fail json <- tryCatch(readLines(con, warn = FALSE), error = function(e) NULL) if (is.null(json)) { oldrel_4 <- "3.6" } else { version <- jsonlite::fromJSON(json)$version oldrel_4 <- re_match(version, "[0-9]+[.][0-9]+")$.match } numeric_version(oldrel_4) } lowercase_r <- function() { path <- proj_path(c("R", "tests")) path <- path[fs::dir_exists(path)] any(fs::path_ext(fs::dir_ls(path, recurse = TRUE)) == "r") } has_coc <- function() { path <- proj_path(c(".", ".github"), "CODE_OF_CONDUCT.md") any(file_exists(path)) } has_citation_file <- function() { file_exists(proj_path("inst/CITATION")) } uses_old_testthat_edition <- function(current) { if (!requireNamespace("testthat", quietly = TRUE)) { return(FALSE) } uses_testthat() && testthat::edition_get() < current } upkeep_extra_bullets <- function(env = NULL) { env <- env %||% safe_pkg_env() if (env_has(env, "upkeep_bullets")) { c(paste0("* [ ] ", env$upkeep_bullets()), "") } else { "" } } checklist_footer <- function(tidy) { tidy_fun <- if (tidy) "tidy_" else "" glue('\\ Created on {Sys.Date()} with `usethis::use_{tidy_fun}upkeep_issue()`, using \\ [usethis v{usethis_version()}](https://usethis.r-lib.org)\\ ') } usethis_version <- function() { utils::packageVersion("usethis") } has_old_cran_comments <- function() { cc <- proj_path("cran-comments.md") file_exists(cc) && any(grepl("# test environment", readLines(cc), ignore.case = TRUE)) } last_upkeep_date <- function() { as.Date( proj_desc()$get_field("Config/usethis/last-upkeep", "2000-01-01"), format = "%Y-%m-%d" ) } last_upkeep_year <- function() { as.integer(format(last_upkeep_date(), "%Y")) } record_upkeep_date <- function(date) { proj_desc_field_update("Config/usethis/last-upkeep", format(date, "%Y-%m-%d")) } usethis/R/author.R0000644000176200001440000001137114717524762013606 0ustar liggesusers#' Add an author to the `Authors@R` field in DESCRIPTION #' #' @description #' `use_author()` adds a person to the `Authors@R` field of the DESCRIPTION #' file, creating that field if necessary. It will not modify, e.g., the role(s) #' or email of an existing author (judged using their "Given Family" name). For #' that we recommend editing DESCRIPTION directly. Or, for programmatic use, #' consider calling the more specialized functions available in the \pkg{desc} #' package directly. 
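# --- Editor's aside (illustration only; not part of usethis) -----------------
# Sketch of the round trip behind the upkeep helpers above: the last upkeep is
# stored in DESCRIPTION as an ISO date under Config/usethis/last-upkeep, and
# last_upkeep_year() reads it back to decide which checklist sections
# tidy_upkeep_checklist() emits. Requires an active usethis project, so it is
# guarded with `if (FALSE)`.
if (FALSE) {
  record_upkeep_date(as.Date("2023-04-01")) # writes "2023-04-01" to DESCRIPTION
  last_upkeep_year()                        # 2023
}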
#' #' `use_author()` also surfaces two other situations you might want to address: #' * Explicit use of the fields `Author` or `Maintainer`. We recommend switching #' to the more modern `Authors@R` field instead, because it offers richer #' metadata for various downstream uses. (Note that `Authors@R` is *eventually* #' processed to create `Author` and `Maintainer` fields, but only when the #' `tar.gz` is built from package source.) #' * Presence of the fake author placed by [create_package()] and #' [use_description()]. This happens when \pkg{usethis} has to create a #' DESCRIPTION file and the user hasn't given any author information via the #' `fields` argument or the global option `"usethis.description"`. The #' placeholder looks something like `First Last [aut, #' cre]` and `use_author()` offers to remove it in interactive sessions. #' #' @inheritParams utils::person #' @inheritDotParams utils::person #' @export #' @examples #' \dontrun{ #' use_author( #' given = "Lucy", #' family = "van Pelt", #' role = c("aut", "cre"), #' email = "lucy@example.com", #' comment = c(ORCID = "LUCY-ORCID-ID") #' ) #' #' use_author("Charlie", "Brown") #' } #' use_author <- function(given = NULL, family = NULL, ..., role = "ctb") { check_is_package("use_author()") maybe_name(given) maybe_name(family) check_character(role) d <- proj_desc() challenge_legacy_author_fields(d) # We only need to consider Authors@R authors_at_r_already <- d$has_fields("Authors@R") if (authors_at_r_already) { check_author_is_novel(given, family, d) } # This person is not already in Authors@R author <- utils::person(given = given, family = family, role = role, ...) aut_fmt <- format(author, style = 'text') if (authors_at_r_already) { ui_bullets(c( "v" = "Adding to {.field Authors@R} in DESCRIPTION:", " " = "{aut_fmt}" )) } else { ui_bullets(c( "v" = "Creating {.field Authors@R} field in DESCRIPTION and adding:", " " = "{aut_fmt}" )) } d$add_author(given = given, family = family, role = role, ...) challenge_default_author(d) d$write() invisible(TRUE) } challenge_legacy_author_fields <- function(d = proj_desc()) { has_legacy_field <- d$has_fields("Author") || d$has_fields("Maintainer") if (!has_legacy_field) { return(invisible()) } ui_bullets(c( "x" = "Found legacy {.field Author} and/or {.field Maintainer} field in DESCRIPTION.", " " = "usethis only supports modification of the {.field Authors@R} field.", "i" = "We recommend one of these paths forward:", "_" = "Delete the legacy fields and rebuild with {.fun use_author}; or", "_" = "Convert to {.field Authors@R} with {.fun desc::desc_coerce_authors_at_r}, then delete the legacy fields." )) if (ui_yep("Do you want to cancel this operation and sort that out first?")) { ui_abort("Cancelling.") } invisible() } check_author_is_novel <- function(given = NULL, family = NULL, d = proj_desc()) { authors <- d$get_authors() authors_given <- purrr::map(authors, "given") authors_family <- purrr::map(authors, "family") m <- purrr::map2_lgl(authors_given, authors_family, function(x, y) { identical(x, given) && identical(y, family) }) if (any(m)) { aut_name <- glue("{given %||% ''} {family %||% ''}") ui_abort(c( "x" = "{.val {aut_name}} already appears in {.field Authors@R}.", " " = "Please make the desired change directly in DESCRIPTION or call the {.pkg desc} package directly." 
)) } invisible() } challenge_default_author <- function(d = proj_desc()) { defaults <- usethis_description_defaults() default_author <- eval(parse(text = defaults[["Authors@R"]])) authors <- d$get_authors() m <- map_lgl( authors, # the `person` class is pretty weird! function(x) identical(x, unclass(default_author)[[1]]) ) if (any(m)) { ui_bullets(c( "i" = "{.field Authors@R} appears to include a placeholder author:", " " = "{format(default_author, style = 'text')}" )) if(is_interactive() && ui_yep("Would you like to remove it?")) { # TODO: Do I want to suppress this output? # Authors removed: First Last, NULL NULL. do.call(d$del_author, unclass(default_author)[[1]]) } } return(invisible()) } usethis/R/pkgdown.R0000644000176200001440000001201214651000165013725 0ustar liggesusers#' Use pkgdown #' #' @description #' [pkgdown](https://pkgdown.r-lib.org) makes it easy to turn your package into #' a beautiful website. usethis provides two functions to help you use pkgdown: #' #' * `use_pkgdown()`: creates a pkgdown config file and adds relevant files or #' directories to `.Rbuildignore` and `.gitignore`. #' #' * `use_pkgdown_github_pages()`: implements the GitHub setup needed to #' automatically publish your pkgdown site to GitHub pages: #' #' - (first, it calls `use_pkgdown()`) #' - [use_github_pages()] prepares to publish the pkgdown site from the #' `gh-pages` branch #' - [`use_github_action("pkgdown")`][use_github_action()] configures a #' GitHub Action to automatically build the pkgdown site and deploy it via #' GitHub Pages #' - The pkgdown site's URL is added to the pkgdown configuration file, #' to the URL field of DESCRIPTION, and to the GitHub repo. #' - Packages owned by certain GitHub organizations (tidyverse, r-lib, and #' tidymodels) get some special treatment, in terms of anticipating the #' (eventual) site URL and the use of a pkgdown template. #' #' @seealso #' @param config_file Path to the pkgdown yaml config file, relative to the #' project. #' @param destdir Target directory for pkgdown docs. 
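# --- Editor's aside (illustration only; not part of usethis) -----------------
# Rough sketch of the minimal _pkgdown.yml that use_pkgdown() writes via
# yaml::as.yaml() (see pkgdown_config() just below). The exact fields depend on
# the installed pkgdown version and on `destdir`; the printed output shown in
# the comments is approximate. Guarded with `if (FALSE)`.
if (FALSE) {
  cat(yaml::as.yaml(list(url = NULL, template = list(bootstrap = 5L))))
  #> url: ~
  #> template:
  #>   bootstrap: 5
}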
#' @export use_pkgdown <- function(config_file = "_pkgdown.yml", destdir = "docs") { check_is_package("use_pkgdown()") check_installed("pkgdown") use_build_ignore(c(config_file, destdir, "pkgdown")) use_git_ignore(destdir) config <- pkgdown_config(destdir) config_path <- proj_path(config_file) write_over(config_path, yaml::as.yaml(config)) edit_file(config_path) invisible(TRUE) } pkgdown_config <- function(destdir) { config <- list( url = NULL ) if (pkgdown_version() >= "1.9000") { config$template <- list(bootstrap = 5L) } if (!identical(destdir, "docs")) { config$destination <- destdir } config } # wrapping because I need to be able to mock this in tests pkgdown_version <- function() { utils::packageVersion("pkgdown") } #' @rdname use_pkgdown #' @export use_pkgdown_github_pages <- function() { tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork")) check_can_push(tr = tr, "to turn on GitHub Pages") use_pkgdown() site <- use_github_pages() use_github_action("pkgdown") site_url <- tidyverse_url(url = site$html_url, tr = tr) use_pkgdown_url(url = site_url, tr = tr) if (is_posit_pkg()) { proj_desc_field_update("Config/Needs/website", "tidyverse/tidytemplate", append = TRUE) } } # helpers ---------------------------------------------------------------------- use_pkgdown_url <- function(url, tr = NULL) { tr <- tr %||% target_repo(github_get = TRUE) config_path <- pkgdown_config_path() ui_bullets(c( "v" = "Recording {.url {url}} as site's {.field url} in {.path {pth(config_path)}}." )) config <- pkgdown_config_meta() if (has_name(config, "url")) { config$url <- url } else { config <- c(url = url, config) } write_utf8(config_path, yaml::as.yaml(config)) proj_desc_field_update("URL", url, append = TRUE) if (has_package_doc()) { ui_bullets(c( "_" = "Run {.run devtools::document()} to update package-level documentation." )) } gh <- gh_tr(tr) homepage <- gh("GET /repos/{owner}/{repo}")[["homepage"]] if (is.null(homepage) || homepage != url) { ui_bullets(c( "v" = "Setting {.url {url}} as homepage of GitHub repo {.val {tr$repo_spec}}." )) gh("PATCH /repos/{owner}/{repo}", homepage = url) } invisible() } tidyverse_url <- function(url, tr = NULL) { tr <- tr %||% target_repo(github_get = TRUE) if (!is_interactive() || !tr$repo_owner %in% c("tidyverse", "r-lib", "tidymodels")) { return(url) } custom_url <- glue("https://{tr$repo_name}.{tr$repo_owner}.org") if (grepl(glue("{custom_url}/?"), url)) { return(url) } if (ui_yep(c( "i" = "{.val {tr$repo_name}} is owned by the {.val {tr$repo_owner}} GitHub organization.", " " = "Shall we configure {.val {custom_url}} as the (eventual) pkgdown URL?" ))) { custom_url } else { url } } pkgdown_config_path <- function() { path_first_existing( proj_path( c( "_pkgdown.yml", "_pkgdown.yaml", "pkgdown/_pkgdown.yml", "pkgdown/_pkgdown.yaml", "inst/_pkgdown.yml", "inst/_pkgdown.yaml" ) ) ) } uses_pkgdown <- function() { !is.null(pkgdown_config_path()) } pkgdown_config_meta <- function() { if (!uses_pkgdown()) { return(list()) } path <- pkgdown_config_path() yaml::read_yaml(path) %||% list() } pkgdown_url <- function(pedantic = FALSE) { if (!uses_pkgdown()) { return(NULL) } meta <- pkgdown_config_meta() url <- meta$url if (!is.null(url)) { return(url) } if (pedantic) { ui_bullets(c( "!" = "{.pkg pkgdown} config does not specify the site's {.field url}, which is optional but recommended." 
)) } NULL } usethis/R/github-pages.R0000644000176200001440000001331214651000165014637 0ustar liggesusers#' Configure a GitHub Pages site #' #' Activates or reconfigures a GitHub Pages site for a project hosted on GitHub. #' This function anticipates two specific usage modes: #' * Publish from the root directory of a `gh-pages` branch, which is assumed to #' be only (or at least primarily) a remote branch. Typically the `gh-pages` #' branch is managed by an automatic "build and deploy" job, such as the one #' configured by [`use_github_action("pkgdown")`][use_github_action()]. #' * Publish from the `"/docs"` directory of a "regular" branch, probably the #' repo's default branch. The user is assumed to have a plan for how they will #' manage the content below `"/docs"`. #' #' @param branch,path Branch and path for the site source. The default of #' `branch = "gh-pages"` and `path = "/"` reflects strong GitHub support for #' this configuration: when a `gh-pages` branch is first created, it is #' *automatically* published to Pages, using the source found in `"/"`. If a #' `gh-pages` branch does not yet exist on the host, `use_github_pages()` #' creates an empty, orphan remote branch. #' #' The most common alternative is to use the repo's default branch, coupled #' with `path = "/docs"`. It is the user's responsibility to ensure that this #' `branch` pre-exists on the host. #' #' Note that GitHub does not support an arbitrary `path` and, at the time of #' writing, only `"/"` or `"/docs"` are accepted. #' @param cname Optional, custom domain name. The `NA` default means "don't set #' or change this", whereas a value of `NULL` removes any previously #' configured custom domain. #' #' Note that this *can* add or modify a CNAME file in your repository. If you #' are using Pages to host a pkgdown site, it is better to specify its URL in #' the pkgdown config file and let pkgdown manage CNAME. #' #' @seealso #' * [use_pkgdown_github_pages()] combines `use_github_pages()` with other #' functions to fully configure a pkgdown site #' * #' * #' @return Site metadata returned by the GitHub API, invisibly #' @export #' #' @examples #' \dontrun{ #' use_github_pages() #' use_github_pages(branch = git_default_branch(), path = "/docs") #' } use_github_pages <- function(branch = "gh-pages", path = "/", cname = NA) { check_name(branch) check_name(path) check_string(cname, allow_empty = FALSE, allow_na = TRUE, allow_null = TRUE) tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork")) check_can_push(tr = tr, "to turn on GitHub Pages") gh <- gh_tr(tr) safe_gh <- purrr::safely(gh) if (branch == "gh-pages") { new_branch <- create_gh_pages_branch(tr, branch = "gh-pages") if (new_branch) { # merely creating gh-pages branch automatically activates publishing # BUT we need to give the servers time to sync up before a new GET # retrieves accurate info... ask me how I know Sys.sleep(2) } } site <- safe_gh("GET /repos/{owner}/{repo}/pages")[["result"]] if (is.null(site)) { ui_bullets(c( "v" = "Activating GitHub Pages for {.val {tr$repo_spec}}." 
)) site <- gh( "POST /repos/{owner}/{repo}/pages", source = list(branch = branch, path = path), .accept = "application/vnd.github.switcheroo-preview+json" ) } need_update <- site$source$branch != branch || site$source$path != path || (is.null(cname) && !is.null(site$cname)) || (is_string(cname) && (is.null(site$cname) || cname != site$cname)) if (need_update) { args <- list( endpoint = "PUT /repos/{owner}/{repo}/pages", source = list(branch = branch, path = path) ) if (is.null(cname) && !is.null(site$cname)) { # this goes out as a JSON `null`, which is necessary to clear cname args$cname <- NA } if (is_string(cname) && (is.null(site$cname) || cname != site$cname)) { args$cname <- cname } Sys.sleep(2) exec(gh, !!!args) Sys.sleep(2) site <- safe_gh("GET /repos/{owner}/{repo}/pages")[["result"]] } ui_bullets(c("v" = "GitHub Pages is publishing from:")) if (!is.null(site$cname)) { kv_line("Custom domain", site$cname) } kv_line("URL", site$html_url) kv_line("Branch", site$source$branch) kv_line("Path", site$source$path) invisible(site) } # returns FALSE if it does NOT create the branch (because it already exists) # returns TRUE if it does create the branch create_gh_pages_branch <- function(tr, branch = "gh-pages") { gh <- gh_tr(tr) safe_gh <- purrr::safely(gh) branch_GET <- safe_gh( "GET /repos/{owner}/{repo}/branches/{branch}", branch = branch ) if (!inherits(branch_GET$error, "http_error_404")) { return(FALSE) } ui_bullets(c( "v" = "Initializing empty, orphan branch {.val {branch}} in GitHub repo {.val {tr$repo_spec}}." )) # GitHub no longer allows you to directly create an empty tree # hence this roundabout method of getting an orphan branch with no files tree <- gh( "POST /repos/{owner}/{repo}/git/trees", tree = list(list( path = "_temp_file_ok_to_delete", mode = "100644", type = "blob", content = "" )) ) commit <- gh( "POST /repos/{owner}/{repo}/git/commits", message = "Init orphan branch", tree = tree$sha ) ref <- gh( "POST /repos/{owner}/{repo}/git/refs", ref = glue("refs/heads/{branch}"), sha = commit$sha ) # this should succeed, but if somehow it does not, it's not worth failing and # leaving pkgdown + GitHub Pages setup half-done --> why I use safe_gh() safe_gh( "DELETE /repos/{owner}/{repo}/contents/_temp_file_ok_to_delete", message = "Remove temp file", sha = purrr::pluck(tree, "tree", 1, "sha"), branch = branch ) TRUE } usethis/R/github_token.R0000644000176200001440000002403114717524721014756 0ustar liggesusers#' Get help with GitHub personal access tokens #' #' @description #' A [personal access #' token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line) #' (PAT) is needed for certain tasks usethis does via the GitHub API, such as #' creating a repository, a fork, or a pull request. If you use HTTPS remotes, #' your PAT is also used when interacting with GitHub as a conventional Git #' remote. These functions help you get and manage your PAT: #' * `gh_token_help()` guides you through token troubleshooting and setup. #' * `create_github_token()` opens a browser window to the GitHub form to #' generate a PAT, with suggested scopes pre-selected. It also offers advice #' on storing your PAT. #' * `gitcreds::gitcreds_set()` helps you register your PAT with the Git #' credential manager used by your operating system. Later, other packages, #' such as usethis, gert, and gh can automatically retrieve that PAT and use #' it to work with GitHub on your behalf. 
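# --- Editor's aside (illustration only; not part of usethis) -----------------
# The typical credential flow described above, written out as a runnable
# (interactive) sequence. All three functions are exported by their respective
# packages; guarded with `if (FALSE)` because they open a browser or prompt for
# input.
if (FALSE) {
  usethis::create_github_token() # opens the GitHub form with recommended scopes
  gitcreds::gitcreds_set()       # paste the new PAT into the Git credential store
  gh::gh_whoami()                # confirm which account and scopes the PAT carries
}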
#' #' Usually, the first time the PAT is retrieved in an R session, it is cached in #' an environment variable, for easier reuse for the duration of that R session. #' After initial acquisition and storage, all of this should happen #' automatically in the background. GitHub is encouraging the use of PATs that #' expire after, e.g., 30 days, so prepare yourself to re-generate and re-store #' your PAT periodically. #' #' Git/GitHub credential management is covered in a dedicated article: [Managing #' Git(Hub) #' Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html) #' #' @details #' `create_github_token()` has previously gone by some other names: #' `browse_github_token()` and `browse_github_pat()`. #' #' @param scopes Character vector of token scopes, pre-selected in the web form. #' Final choices are made in the GitHub form. Read more about GitHub API #' scopes at #' . #' @param description Short description or nickname for the token. You might #' (eventually) have multiple tokens on your GitHub account and a label can #' help you keep track of what each token is for. #' @inheritParams use_github #' #' @seealso [gh::gh_whoami()] for information on an existing token and #' `gitcreds::gitcreds_set()` and `gitcreds::gitcreds_get()` for a secure way #' to store and retrieve your PAT. #' #' @return Nothing #' @name github-token NULL #' @export #' @rdname github-token #' @examples #' \dontrun{ #' create_github_token() #' } create_github_token <- function(scopes = c("repo", "user", "gist", "workflow"), description = "DESCRIBE THE TOKEN'S USE CASE", host = NULL) { scopes <- glue_collapse(scopes, ",") host <- get_hosturl(host %||% default_api_url()) url <- glue( "{host}/settings/tokens/new?scopes={scopes}&description={description}" ) withr::defer(view_url(url)) hint <- code_hint_with_host("gitcreds::gitcreds_set", host) message <- c( "_" = "Call {.run {hint}} to register this token in the local Git credential store." ) if (is_linux()) { message <- c( message, "!" = "On Linux, it can be tricky to store credentials persistently.", "i" = "Read more in the {.href ['Managing Git(Hub) Credentials' article](https://usethis.r-lib.org/articles/articles/git-credentials.html)}." ) } message <- c( message, "i" = "It is also a great idea to store this token in any password-management software that you use." 
) ui_bullets(message) invisible() } #' @inheritParams use_github #' @export #' @rdname github-token #' @examples #' \dontrun{ #' gh_token_help() #' } gh_token_help <- function(host = NULL) { host_url <- get_hosturl(host %||% default_api_url()) kv_line("GitHub host", host_url) pat_sitrep(host_url, scope = "project") } code_hint_with_host <- function(function_name, host = NULL, arg_name = NULL) { arg_hint <- function(host, arg_name) { if (is.null(host) || is_github_dot_com(host)) { return("") } if (is_null(arg_name)) { glue('"{host}"') } else { glue('{arg_name} = "{host}"') } } glue_chr("{function_name}({arg_hint(host, arg_name)})") } # workhorse behind gh_token_help() and called, possibly twice, in git_sitrep() # hence the need for `scold_for_renviron = TRUE/FALSE` # scope determines if "global" or "de_facto" email is checked pat_sitrep <- function(host = "https://github.com", scope = c("user", "project"), scold_for_renviron = TRUE) { scope <- rlang::arg_match(scope) if (scold_for_renviron) { scold_for_renviron() } maybe_pat <- purrr::safely(gh::gh_token)(api_url = host) if (is.null(maybe_pat$result)) { ui_bullets(c( "x" = "The PAT discovered for {.url {host}} has the wrong structure." )) ui_bullets(c("i" = maybe_pat$error)) return(invisible(FALSE)) } pat <- maybe_pat$result have_pat <- pat != "" if (!have_pat) { kv_line("Personal access token for {.val {host}}", NULL) hint <- code_hint_with_host("usethis::create_github_token", host, "host") ui_bullets(c( "_" = "To create a personal access token, call {.run {hint}}." )) hint <- code_hint_with_host("gitcreds::gitcreds_set", host) url <- "https://usethis.r-lib.org/articles/articles/git-credentials.html" ui_bullets(c( "_" = "To store a token for current and future use, call {.run {hint}}.", "i" = "Read more in the {.href [Managing Git(Hub) Credentials]({url})} article." )) return(invisible(FALSE)) } kv_line("Personal access token for {.val {host}}", ui_special("discovered")) online <- is_online(host) if (!online) { ui_bullets(c( "x" = "Host is not reachable.", " " = "No further vetting of the personal access token is possible.", "_" = "Try again when {.val {host}} can be reached." )) return(invisible()) } maybe_who <- purrr::safely(gh::gh_whoami)(.token = pat, .api_url = host) if (is.null(maybe_who$result)) { message <- c("x" = "Can't get user information for this token.") if (inherits(maybe_who$error, "http_error_401")) { message <- c( message, "i" = "The token may no longer be valid or perhaps it lacks the {.val user} scope." ) } message <- c( message, "i" = maybe_who$error$message ) ui_bullets(message) return(invisible(FALSE)) } who <- maybe_who$result kv_line("GitHub user", who$login) scopes <- strsplit(who$scopes, ", ")[[1]] kv_line("Token scopes", scopes) scold_for_scopes(scopes) maybe_emails <- purrr::safely(gh::gh)("/user/emails", .token = pat, .api_url = host) if (is.null(maybe_emails$result)) { ui_bullets(c( "x" = "Can't retrieve registered email addresses from GitHub.", "i" = "Consider re-creating your PAT with the {.val user} (or at least {.val user:email}) scope." )) } else { emails <- maybe_emails$result addresses <- map_chr( emails, ~ if (.x$primary) glue_data(.x, "{email} (primary)") else .x[["email"]] ) kv_line("Email(s)", addresses) ui_silence( user <- git_user_get(where_from_scope(scope)) ) git_user_check(user) if (!is.null(user$email) && !any(grepl(user$email, addresses))) { ui_bullets(c( "x" = "Git user's email ({.val {user$email}}) doesn't appear to be registered with GitHub host." 
)) } } invisible(TRUE) } scold_for_renviron <- function() { renviron_path <- scoped_path_r("user", ".Renviron", envvar = "R_ENVIRON_USER") if (!file_exists(renviron_path)) { return(invisible()) } renviron_lines <- read_utf8(renviron_path) fishy_lines <- grep("^GITHUB_(PAT|TOKEN).*=.+", renviron_lines, value = TRUE) if (length(fishy_lines) == 0) { return(invisible()) } fishy_keys <- re_match(fishy_lines, "^(?.+)=.+")$key # TODO: when I switch to cli, this is a good place for `!` # in general, lots below is suboptimal, but good enough for now ui_bullets(c( "!" = "{.path {pth(renviron_path)}} defines{cli::qty(length(fishy_keys))} the environment variable{?s}:", bulletize(fishy_keys), "!" = "This can prevent your PAT from being retrieved from the Git credential store.", "i" = "If you are troubleshooting PAT problems, the root cause may be an old, invalid PAT defined in {.path {pth(renviron_path)}}.", "i" = "For most use cases, it is better to NOT define the PAT in {.file .Renviron}.", "_" = "Call {.run usethis::edit_r_environ()} to edit that file.", "_" = "Then call {.run gitcreds::gitcreds_set()} to put the PAT into the Git credential store." )) invisible() } scold_for_scopes <- function(scopes) { if (length(scopes) == 0) { ui_bullets(c( "x" = "Token has no scopes!", "i" = "Tokens initiated with {.fun create_github_token} default to the recommended scopes." )) return(invisible()) } # https://docs.github.com/en/free-pro-team@latest/developers/apps/scopes-for-oauth-apps # why these checks? # previous defaults for create_github_token(): repo, gist, user:email # more recently: repo, user, gist, workflow # (gist scope is a very weak recommendation) has_repo <- "repo" %in% scopes has_workflow <- "workflow" %in% scopes has_user_email <- "user" %in% scopes || "user:email" %in% scopes if (has_repo && has_workflow && has_user_email) { return(invisible()) } suggestions <- c( "*" = if (!has_repo) "{.val repo}: needed to fully access user's repos", "*" = if (!has_workflow) "{.val workflow}: needed to manage GitHub Actions workflow files", "*" = if (!has_user_email) "{.val user:email}: needed to read user's email addresses" ) message <- c( "!" = "Token lacks recommended scopes:", suggestions, "i" = "Consider re-creating your PAT with the missing scopes.", "i" = "Tokens initiated with {.fun usethis::create_github_token} default to the recommended scopes." ) ui_bullets(message) } usethis/R/tutorial.R0000644000176200001440000000274714651000165014135 0ustar liggesusers#' Create a learnr tutorial #' #' Creates a new tutorial below `inst/tutorials/`. Tutorials are interactive R #' Markdown documents built with the [`learnr` #' package](https://rstudio.github.io/learnr/index.html). `use_tutorial()` does #' this setup: #' * Adds learnr to Suggests in `DESCRIPTION`. #' * Gitignores `inst/tutorials/*.html` so you don't accidentally track #' rendered tutorials. #' * Creates a new `.Rmd` tutorial from a template and, optionally, opens it #' for editing. #' * Adds new `.Rmd` to `.Rbuildignore`. #' #' @param name Base for file name to use for new `.Rmd` tutorial. Should consist #' only of numbers, letters, `_` and `-`. We recommend using lower case. #' @param title The human-facing title of the tutorial. #' @inheritParams use_template #' @seealso The [learnr package #' documentation](https://rstudio.github.io/learnr/index.html). 
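# --- Editor's aside (illustration only; not part of usethis) -----------------
# Sketch of the scope check defined above: scold_for_scopes() stays quiet when
# the recommended scopes are present and lists whatever is missing otherwise.
# It only prints messages; guarded with `if (FALSE)`.
if (FALSE) {
  scold_for_scopes(c("repo", "workflow", "user")) # silent: recommendations met
  scold_for_scopes("gist")                        # flags missing repo, workflow, user:email
}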
#' @export #' @examples #' \dontrun{ #' use_tutorial("learn-to-do-stuff", "Learn to do stuff") #' } use_tutorial <- function(name, title, open = rlang::is_interactive()) { check_name(name) check_name(title) dir_path <- path("inst", "tutorials", name) dir_create(dir_path) use_directory(dir_path) use_git_ignore("*.html", directory = dir_path) use_dependency("learnr", "Suggests") path <- path(dir_path, asciify(name), ext = "Rmd") new <- use_template( "tutorial-template.Rmd", save_as = path, data = list(tutorial_title = title), ignore = FALSE, open = open ) invisible(new) } usethis/R/github-actions.R0000644000176200001440000002325214717524721015220 0ustar liggesusers#' Set up a GitHub Actions workflow #' #' @description #' Sets up continuous integration (CI) for an R package that is developed on #' GitHub using [GitHub Actions](https://github.com/features/actions). CI can be #' used to trigger various operations for each push or pull request, e.g. #' running `R CMD check` or building and deploying a pkgdown site. #' #' ## Workflows #' #' There are four particularly important workflows that are used by many #' packages: #' #' * `check-standard`: Run `R CMD check` using R-latest on Linux, Mac, and #' Windows, and using R-devel and R-oldrel on Linux. This is a good baseline #' if you plan on submitting your package to CRAN. #' * `test-coverage`: Compute test coverage and report to #' by calling [covr::codecov()]. #' * `pkgdown`: Automatically build and publish a pkgdown website. #' But we recommend instead calling [use_pkgdown_github_pages()] which #' performs other important set up. #' * `pr-commands`: Enables the use of two R-specific commands in pull request #' issue comments: `/document` to run `roxygen2::roxygenise()` and #' `/style` to run `styler::style_pkg()`. Both will update the PR with any #' changes once they're done. #' #' If you call `use_github_action()` without arguments, you'll be prompted to #' pick from one of these. Otherwise you can see a complete list of #' possibilities provided by r-lib at #' , or you can supply #' your own `url` to use any other workflow. #' #' @param name For `use_github_action()`: Name of one of the example workflow #' from (with or #' without extension), e.g. `"pkgdown"`, `"check-standard.yaml"`. #' #' If the `name` starts with `check-`, `save_as` will default to #' `R-CMD-check.yaml` and `badge` default to `TRUE`. #' @param ref Desired Git reference, usually the name of a tag (`"v2"`) or #' branch (`"main"`). Other possibilities include a commit SHA (`"d1c516d"`) #' or `"HEAD"` (meaning "tip of remote's default branch"). If not specified, #' defaults to the latest published release of `r-lib/actions` #' (). #' @param url The full URL to a `.yaml` file on GitHub. See more details in #' [use_github_file()]. #' @param save_as Name of the local workflow file. Defaults to `name` or #' `fs::path_file(url)` for `use_github_action()`. Do not specify any other #' part of the path; the parent directory will always be `.github/workflows`, #' within the active project. #' @param readme The full URL to a `README` file that provides more details #' about the workflow. Ignored when `url` is `NULL`. #' @param badge Should we add a badge to the `README`? 
#' @inheritParams use_template #' #' @examples #' \dontrun{ #' use_github_action() #' #' use_github_action_check_standard() #' #' use_github_action("pkgdown") #' } #' @export use_github_action <- function(name = NULL, ref = NULL, url = NULL, save_as = NULL, readme = NULL, ignore = TRUE, open = FALSE, badge = NULL) { maybe_name(name) maybe_name(ref) maybe_name(url) maybe_name(save_as) maybe_name(readme) check_bool(ignore) check_bool(open) check_bool(badge, allow_null = TRUE) if (is.null(url)) { name <- name %||% choose_gha_workflow() if (path_ext(name) == "") { name <- path_ext_set(name, "yaml") } ref <- ref %||% latest_release() url <- glue( "https://raw.githubusercontent.com/r-lib/actions/{ref}/examples/{name}" ) readme <- glue( "https://github.com/r-lib/actions/blob/{ref}/examples/README.md" ) } withr::defer(rstudio_git_tickle()) use_dot_github(ignore = ignore) if (is.null(save_as)) { if (is_check_action(url)) { save_as <- "R-CMD-check.yaml" } else { save_as <- path_file(url) } } save_as <- path(".github", "workflows", save_as) create_directory(path_dir(proj_path(save_as))) if (grepl("^http", url)) { # `ignore = FALSE` because we took care of this at directory level, above new <- use_github_file(url, save_as = save_as, ignore = FALSE, open = open) } else { # local file case, https://github.com/r-lib/usethis/issues/1548 contents <- read_utf8(url) new <- write_over(proj_path(save_as), contents) } if (!is.null(readme)) { ui_bullets(c("_" = "Learn more at {.url {readme}}.")) } if (badge %||% is_check_action(url)) { use_github_actions_badge(path_file(save_as)) } if (badge %||% is_coverage_action(url)) { use_codecov_badge(target_repo_spec()) } invisible(new) } choose_gha_workflow <- function(error_call = caller_env()) { if (!is_interactive()) { cli::cli_abort( "{.arg name} is absent and must be supplied", call = error_call ) } prompt <- cli::format_inline( "Which action do you want to add? (0 to exit)\n", "(See {.url https://github.com/r-lib/actions/tree/v2/examples} for other options)" ) # Any changes here also need to be reflected in documentation workflows <- c( "check-standard" = "Run `R CMD check` on Linux, macOS, and Windows", "test-coverage" = "Compute test coverage and report to https://about.codecov.io", "pr-commands" = "Add /document and /style commands for pull requests" ) options <- paste0(cli::style_bold(names(workflows)), ": ", workflows) choice <- utils::menu( title = prompt, choices = options ) if (choice == 0) { cli::cli_abort("Selection terminated", call = error_call) } names(workflows)[choice] } is_check_action <- function(url) { grepl("^check-", path_file(url)) } is_coverage_action <- function(url) { grepl("test-coverage", path_file(url)) } #' Generates a GitHub Actions badge #' #' Generates a GitHub Actions badge and that's all. This exists primarily for #' internal use. #' #' @keywords internal #' @param name Name of the workflow's YAML configuration file (with or without #' extension), e.g. `"R-CMD-check"`, `"R-CMD-check.yaml"`. 
#' @inheritParams use_github_action #' @export use_github_actions_badge <- function(name = "R-CMD-check.yaml", repo_spec = NULL) { if (path_ext(name) == "") { name <- path_ext_set(name, "yaml") } repo_spec <- repo_spec %||% target_repo_spec() enc_name <- utils::URLencode(name) img <- glue("https://github.com/{repo_spec}/actions/workflows/{enc_name}/badge.svg") url <- glue("https://github.com/{repo_spec}/actions/workflows/{enc_name}") use_badge(path_ext_remove(name), url, img) } # tidyverse GHA setup ---------------------------------------------------------- #' @details #' * `use_tidy_github_actions()`: Sets up the following workflows using [GitHub #' Actions](https://github.com/features/actions): #' - Run `R CMD check` on the current release, devel, and four previous #' versions of R. The build matrix also ensures `R CMD check` is run at #' least once on each of the three major operating systems (Linux, macOS, #' and Windows). #' - Report test coverage. #' - Build and deploy a pkgdown site. #' - Provide two commands to be used in pull requests: `/document` to run #' `roxygen2::roxygenise()` and update the PR, and `/style` to run #' `styler::style_pkg()` and update the PR. #' #' This is how the tidyverse team checks its packages, but it is overkill #' for less widely used packages. Consider using the more streamlined #' workflows set up by [use_github_actions()] or #' [use_github_action_check_standard()]. #' @export #' @rdname tidyverse #' @inheritParams use_github_action use_tidy_github_actions <- function(ref = NULL) { repo_spec <- target_repo_spec() use_github_action("check-full.yaml", ref = ref, badge = TRUE) use_github_action("pr-commands", ref = ref) use_github_action("pkgdown", ref = ref) use_coverage(repo_spec = repo_spec) use_github_action("test-coverage", ref = ref) old_configs <- proj_path(c(".travis.yml", "appveyor.yml")) has_appveyor_travis <- file_exists(old_configs) if (any(has_appveyor_travis)) { if (ui_yep("Remove existing {.path .travis.yml} and {.path appveyor.yml}?")) { file_delete(old_configs[has_appveyor_travis]) ui_bullets(c("_" = "Remove old badges from README.")) } } invisible(TRUE) } # GHA helpers ------------------------------------------------------------------ uses_github_actions <- function() { path <- proj_path(".github", "workflows") file_exists(path) } check_uses_github_actions <- function() { if (uses_github_actions()) { return(invisible()) } ui_abort(c( "Cannot detect that package {.pkg {project_name()}} already uses GitHub Actions.", "Do you need to run {.run [use_github_action()](usethis::use_github_action())}?" 
)) } latest_release <- function(repo_spec = "https://github.com/r-lib/actions") { parsed <- parse_repo_url(repo_spec) # https://docs.github.com/en/rest/reference/releases#list-releases raw_releases <- gh::gh( "/repos/{owner}/{repo}/releases", owner = spec_owner(parsed$repo_spec), repo = spec_repo(parsed$repo_spec), .api_url = parsed$host, .limit = Inf ) tag_names <- purrr::discard( map_chr(raw_releases, "tag_name"), map_lgl(raw_releases, "prerelease") ) pick_tag(tag_names) } # 1) filter to releases in the latest major version series # 2) return the max, according to R's numeric_version logic pick_tag <- function(nm) { dat <- data.frame(nm = nm, stringsAsFactors = FALSE) dat$version <- numeric_version(sub("^[^0-9]*", "", dat$nm)) dat <- dat[dat$version == max(dat$version), ] dat$nm[1] } usethis/R/citation.R0000644000176200001440000000046314651000165014075 0ustar liggesusers#' Create a CITATION template #' #' Use this if you want to encourage users of your package to cite an #' article or book. #' #' @export use_citation <- function() { check_is_package() use_directory("inst") use_template( "citation-template.R", path("inst", "CITATION"), open = TRUE ) } usethis/R/lifecycle.R0000644000176200001440000000262514651000165014224 0ustar liggesusers#' Use lifecycle badges #' #' @description #' This helper: #' #' * Adds lifecycle as a dependency. #' * Imports [lifecycle::deprecated()] for use in function arguments. #' * Copies the lifecycle badges into `man/figures`. #' * Reminds you how to use the badge syntax. #' #' Learn more at #' #' @seealso [use_lifecycle_badge()] to signal the #' [lifecycle stage](https://lifecycle.r-lib.org/articles/stages.html) of #' your package as whole #' @export use_lifecycle <- function() { check_is_package("use_lifecycle()") check_uses_roxygen("use_lifecycle()") if (!uses_roxygen_md()) { ui_abort(" Turn on roxygen2 markdown support with {.run usethis::use_roxygen_md()}, then try again.") } use_package("lifecycle") use_import_from("lifecycle", "deprecated") dest_dir <- proj_path("man", "figures") create_directory(dest_dir) templ_dir <- path_package("usethis", "templates") templ_files <- dir_ls(templ_dir, glob = "*/lifecycle-*.svg") purrr::walk(templ_files, file_copy, dest_dir, overwrite = TRUE) ui_bullets(c( "v" = "Copied SVG badges to {.path {pth(dest_dir)}}.", "_" = "Add badges in documentation topics by inserting a line like this:", " " = "#' `r lifecycle::badge('experimental')`", " " = "#' `r lifecycle::badge('superseded')`", " " = "#' `r lifecycle::badge('deprecated')`" )) invisible(TRUE) } usethis/R/use_github_file.R0000644000176200001440000001167314651000165015425 0ustar liggesusers#' Copy a file from any GitHub repo into the current project #' #' Gets the content of a file from GitHub, from any repo the user can read, and #' writes it into the active project. This function wraps an endpoint of the #' GitHub API which supports specifying a target reference (i.e. branch, tag, #' or commit) and which follows symlinks. #' #' @param repo_spec A string identifying the GitHub repo or, alternatively, a #' GitHub file URL. Acceptable forms: #' * Plain `OWNER/REPO` spec #' * A blob URL, such as `"https://github.com/OWNER/REPO/blob/REF/path/to/some/file"` #' * A raw URL, such as `"https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"` #' #' In the case of a URL, the `path`, `ref`, and `host` are extracted from it, in #' addition to the `repo_spec`. #' @param path Path of file to copy, relative to the GitHub repo it lives in. 
#' This is extracted from `repo_spec` when user provides a URL. #' @param save_as Path of file to create, relative to root of active project. #' Defaults to the last part of `path`, in the sense of `basename(path)` or #' `fs::path_file(path)`. #' @param ref The name of a branch, tag, or commit. By default, the file at #' `path` will be copied from its current state in the repo's default branch. #' This is extracted from `repo_spec` when user provides a URL. #' @inheritParams use_template #' @inheritParams use_github #' @inheritParams write_over #' #' @return A logical indicator of whether a file was written, invisibly. #' @export #' #' @examples #' \dontrun{ #' use_github_file( #' "https://github.com/r-lib/actions/blob/v2/examples/check-standard.yaml" #' ) #' #' use_github_file( #' "r-lib/actions", #' path = "examples/check-standard.yaml", #' ref = "v2", #' save_as = ".github/workflows/R-CMD-check.yaml" #' ) #' } use_github_file <- function(repo_spec, path = NULL, save_as = NULL, ref = NULL, ignore = FALSE, open = FALSE, overwrite = FALSE, host = NULL) { check_name(repo_spec) maybe_name(path) maybe_name(save_as) maybe_name(ref) check_bool(ignore) check_bool(open) check_bool(overwrite) maybe_name(host) dat <- parse_file_url(repo_spec) if (dat$parsed) { repo_spec <- dat$repo_spec path <- dat$path ref <- dat$ref host <- dat$host } save_as <- save_as %||% path_file(path) ref_string <- if (is.null(ref)) "" else glue("@{ref}") github_string <- glue("{repo_spec}/{path}{ref_string}") ui_bullets(c( "v" = "Saving {.val {github_string}} to {.path {pth(save_as)}}." )) lines <- read_github_file( repo_spec = repo_spec, path = path, ref = ref, host = host ) new <- write_over( proj_path(save_as), lines, quiet = TRUE, overwrite = overwrite ) if (ignore) { use_build_ignore(save_as) } if (open && new) { edit_file(proj_path(save_as)) } invisible(new) } read_github_file <- function(repo_spec, path, ref = NULL, host = NULL) { # https://docs.github.com/en/rest/reference/repos#contents # https://docs.github.com/en/rest/reference/repos#if-the-content-is-a-symlink # If the requested {path} points to a symlink, and the symlink's target is a # normal file in the repository, then the API responds with the content of the # file.... 
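  # Note: the request below asks for the raw media type (`.accept`) and streams
  # the response to a tempfile (`.destfile`), so we receive the file contents
  # verbatim rather than the endpoint's default JSON payload with
  # base64-encoded content; read_utf8() then returns the file as a character
  # vector of lines.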
tf <- withr::local_tempfile() gh::gh( "/repos/{repo_spec}/contents/{path}", repo_spec = repo_spec, path = path, ref = ref, .api_url = host, .destfile = tf, .accept = "application/vnd.github.v3.raw" ) read_utf8(tf) } # https://github.com/OWNER/REPO/blob/REF/path/to/some/file # https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file # https://github.acme.com/OWNER/REPO/blob/REF/path/to/some/file # https://raw.github.acme.com/OWNER/REPO/REF/path/to/some/file parse_file_url <- function(x) { out <- list( parsed = FALSE, repo_spec = x, path = NULL, ref = NULL, host = NULL ) dat <- re_match(x, github_remote_regex) if (is.na(dat$.match)) { return(out) } # TODO: generalize here for GHE hosts that don't include 'github' if (!grepl("github", dat$host)) { ui_abort("URL doesn't seem to be associated with GitHub.") } if (!grepl("^(raw[.])?github", dat$host) || !nzchar(dat$fragment) || (grepl("^github", dat$host) && !grepl("^/blob/", dat$fragment))) { ui_abort("Can't parse the URL provided via {.arg repo_spec}.") } out$parsed <- TRUE dat$host <- sub("^raw[.]", "", dat$host) dat$host <- sub("^githubusercontent", "github", dat$host) dat$fragment <- sub("^/(blob/)?", "", dat$fragment) dat_fragment <- re_match(dat$fragment, "^(?[^/]+)/(?.+)$") out$repo_spec <- make_spec(owner = dat$repo_owner, repo = dat$repo_name) out$path <- dat_fragment$path out$ref <- dat_fragment$ref out$host <- glue_chr("https://{dat$host}") out } usethis/R/test.R0000644000176200001440000000513714651000165013245 0ustar liggesusers#' Sets up overall testing infrastructure #' #' Creates `tests/testthat/`, `tests/testthat.R`, and adds the testthat package #' to the Suggests field. Learn more in #' #' @param edition testthat edition to use. Defaults to the latest edition, i.e. #' the major version number of the currently installed testthat. #' @param parallel Should tests be run in parallel? This feature appeared in #' testthat 3.0.0; see for #' details and caveats. #' @seealso [use_test()] to create individual test files #' @export #' @examples #' \dontrun{ #' use_testthat() #' #' use_test() #' #' use_test("something-management") #' } use_testthat <- function(edition = NULL, parallel = FALSE) { use_testthat_impl(edition, parallel = parallel) ui_bullets(c( "_" = "Call {.run usethis::use_test()} to initialize a basic test file and open it for editing." )) } use_testthat_impl <- function(edition = NULL, parallel = FALSE) { check_installed("testthat") if (utils::packageVersion("testthat") < "2.1.0") { ui_abort(" {.pkg testthat} 2.1.0 or greater needed. 
Please install before re-trying") } if (is_package()) { edition <- check_edition(edition) use_dependency("testthat", "Suggests", paste0(edition, ".0.0")) proj_desc_field_update("Config/testthat/edition", as.character(edition), overwrite = TRUE) if (parallel) { proj_desc_field_update("Config/testthat/parallel", "true", overwrite = TRUE) } else { proj_desc()$del("Config/testthat/parallel") } } else { if (!is.null(edition)) { ui_abort("Can't declare {.pkg testthat} edition outside of a package.") } } use_directory(path("tests", "testthat")) use_template( "testthat.R", save_as = path("tests", "testthat.R"), data = list(name = project_name()) ) } check_edition <- function(edition = NULL) { version <- utils::packageVersion("testthat")[[1, c(1, 2)]] if (version[[2]] == "99") { version <- version[[1]] + 1L } else { version <- version[[1]] } if (is.null(edition)) { version } else { if (!is.numeric(edition) || length(edition) != 1) { ui_abort("{.arg edition} must be a single number.") } if (edition > version) { vers <- utils::packageVersion("testthat") ui_abort(" {.var edition} ({edition}) not available in installed verion of {.pkg testthat} ({vers}).") } as.integer(edition) } } uses_testthat <- function() { paths <- proj_path(c(path("inst", "tests"), path("tests", "testthat"))) any(dir_exists(paths)) } usethis/R/cran.R0000644000176200001440000000123214651000165013201 0ustar liggesusers#' CRAN submission comments #' #' Creates `cran-comments.md`, a template for your communications with CRAN when #' submitting a package. The goal is to clearly communicate the steps you have #' taken to check your package on a wide range of operating systems. If you are #' submitting an update to a package that is used by other packages, you also #' need to summarize the results of your [reverse dependency #' checks][use_revdep]. #' #' @export #' @inheritParams use_template use_cran_comments <- function(open = rlang::is_interactive()) { check_is_package("use_cran_comments()") use_template( "cran-comments.md", ignore = TRUE, open = open ) } usethis/R/package.R0000644000176200001440000001525114717524721013673 0ustar liggesusers#' Depend on another package #' #' @description #' `use_package()` adds a CRAN package dependency to `DESCRIPTION` and offers a #' little advice about how to best use it. `use_dev_package()` adds a dependency #' on an in-development package, adding the dev repo to `Remotes` so it will be #' automatically installed from the correct location. There is no helper to #' remove a dependency: to do that, simply remove that package from your #' `DESCRIPTION` file. #' #' `use_package()` exists to support a couple of common maneuvers: #' * Add a dependency to `Imports` or `Suggests` or `LinkingTo`. #' * Add a minimum version to a dependency. #' * Specify the minimum supported version for R. #' #' `use_package()` probably works for slightly more exotic modifications, but at #' some point, you should edit `DESCRIPTION` yourself by hand. There is no #' intention to account for all possible edge cases. #' #' @param package Name of package to depend on. #' @param type Type of dependency: must be one of "Imports", "Depends", #' "Suggests", "Enhances", or "LinkingTo" (or unique abbreviation). Matching #' is case insensitive. #' @param min_version Optionally, supply a minimum version for the package. Set #' to `TRUE` to use the currently installed version or use a version string #' suitable for [numeric_version()], such as "2.5.0". #' @param remote By default, an `OWNER/REPO` GitHub remote is inserted. 
#' Optionally, you can supply a character string to specify the remote, e.g. #' `"gitlab::jimhester/covr"`, using any syntax supported by the [remotes #' package]( #' https://remotes.r-lib.org/articles/dependencies.html#other-sources). #' #' @seealso The [dependencies section](https://r-pkgs.org/dependencies-mindset-background.html) of #' [R Packages](https://r-pkgs.org). #' #' @export #' @examples #' \dontrun{ #' use_package("ggplot2") #' use_package("dplyr", "suggests") #' use_dev_package("glue") #' #' # Depend on R version 4.1 #' use_package("R", type = "Depends", min_version = "4.1") #' } use_package <- function(package, type = "Imports", min_version = NULL) { if (type == "Imports") { refuse_package(package, verboten = c("tidyverse", "tidymodels")) } changed <- use_dependency(package, type, min_version = min_version) if (changed) { how_to_use(package, type) } invisible() } #' @export #' @rdname use_package use_dev_package <- function(package, type = "Imports", remote = NULL) { refuse_package(package, verboten = c("tidyverse", "tidymodels")) changed <- use_dependency(package, type = type, min_version = TRUE) use_remote(package, remote) if (changed) { how_to_use(package, type) } invisible() } use_remote <- function(package, package_remote = NULL) { desc <- proj_desc() remotes <- desc$get_remotes() if (any(grepl(package, remotes))) { return(invisible()) } if (is.null(package_remote)) { package_desc <- desc::desc(package = package) package_remote <- package_remote(package_desc) } ui_bullets(c( "v" = "Adding {.val {package_remote}} to {.field Remotes} field in DESCRIPTION." )) remotes <- c(remotes, package_remote) desc$set_remotes(remotes) desc$write() invisible() } # Helpers ----------------------------------------------------------------- package_remote <- function(desc) { remote <- as.list(desc$get(c("RemoteType", "RemoteUsername", "RemoteRepo"))) is_recognized_remote <- all(map_lgl(remote, ~ is_string(.x) && !is.na(.x))) if (is_recognized_remote) { # non-GitHub remotes get a 'RemoteType::' prefix if (!identical(remote$RemoteType, "github")) { remote$RemoteUsername <- paste0(remote$RemoteType, "::", remote$RemoteUsername) } return(paste0(remote$RemoteUsername, "/", remote$RemoteRepo)) } package <- desc$get_field("Package") urls <- desc_urls(package, desc = desc) urls <- urls[urls$is_github, ] if (nrow(urls) < 1) { ui_abort("Cannot determine remote for {.pkg {package}}.") } parsed <- parse_github_remotes(urls$url[[1]]) remote <- paste0(parsed$repo_owner, "/", parsed$repo_name) if (ui_yep(c( "!" = "{.pkg {package}} was either installed from CRAN or local source.", "i" = "Based on DESCRIPTION, we propose the remote: {.val {remote}}.", " " = "Is this OK?" ))) { remote } else { ui_abort("Cannot determine remote for {.pkg {package}}.") } } refuse_package <- function(package, verboten) { if (package %in% verboten) { code <- glue('use_package("{package}", type = "depends")') ui_abort(c( "x" = "{.pkg {package}} is a meta-package and it is rarely a good idea to depend on it.", "_" = "Please determine the specific underlying package(s) that provide the function(s) you need and depend on that instead.", "i" = "For data analysis projects that use a package structure but do not implement a formal R package, adding {.pkg {package}} to {.field Depends} is a reasonable compromise.", "_" = "Call {.code {code}} to achieve this." 
)) } invisible(package) } how_to_use <- function(package, type) { types <- tolower(c("Imports", "Depends", "Suggests", "Enhances", "LinkingTo")) type <- match.arg(tolower(type), types) if (package == "R" && type == "depends") { return("") } switch(type, imports = ui_bullets(c( "_" = "Refer to functions with {.code {paste0(package, '::fun()')}}." )), depends = ui_bullets(c( "!" = "Are you sure you want {.field Depends}? {.field Imports} is almost always the better choice." )), suggests = suggests_usage_hint(package), enhances = "", linkingto = show_includes(package) ) } suggests_usage_hint <- function(package) { imports_rlang <- proj_desc()$has_dep("rlang", type = "Imports") if (imports_rlang) { code1 <- glue('rlang::is_installed("{package}")') code2 <- glue('rlang::check_installed("{package}")') ui_bullets(c( "_" = "In your package code, use {.code {code1}} or {.code {code2}} to test if {.pkg {package}} is installed." )) code <- glue("{package}::fun()") ui_bullets(c("_" = "Then directly refer to functions with {.code {code}}.")) } else { code <- glue('requireNamespace("{package}", quietly = TRUE)') ui_bullets(c("_" = "Use {.code {code}} to test if {.pkg {package}} is installed.")) code <- glue("{package}::fun()") ui_bullets(c("_" = "Then directly refer to functions with {.code {code}}.")) } } show_includes <- function(package) { incl <- path_package("include", package = package) h <- dir_ls(incl, regexp = "[.](h|hpp)$") if (length(h) == 0) { return() } ui_bullets(c("Possible includes are:")) ui_code_snippet("#include <{path_file(h)}>", copy = FALSE, language = "") } usethis/R/rprofile.R0000644000176200001440000000316214651000165014104 0ustar liggesusers#' Helpers to make useful changes to `.Rprofile` #' #' @description #' All functions open your `.Rprofile` and give you the code you need to #' paste in. #' #' * `use_devtools()`: makes devtools available in interactive sessions. #' * `use_usethis()`: makes usethis available in interactive sessions. #' * `use_reprex()`: makes reprex available in interactive sessions. #' * `use_conflicted()`: makes conflicted available in interactive sessions. #' * `use_partial_warnings()`: warns on partial matches. #' #' @name rprofile-helper NULL #' @rdname rprofile-helper #' @export use_conflicted <- function() { use_rprofile_package("conflicted") } #' @rdname rprofile-helper #' @export use_reprex <- function() { use_rprofile_package("reprex") } #' @rdname rprofile-helper #' @export use_usethis <- function() { use_rprofile_package("usethis") } #' @rdname rprofile-helper #' @export use_devtools <- function() { use_rprofile_package("devtools") } use_rprofile_package <- function(package) { check_installed(package) ui_bullets(c( "_" = "Include this code in {.path .Rprofile} to make {.pkg {package}} available in all interactive sessions:" )) ui_code_snippet(" if (interactive()) {{ suppressMessages(require({package})) }}") edit_r_profile("user") } #' @rdname rprofile-helper #' @export use_partial_warnings <- function() { ui_bullets(c( "_" = "Include this code in {.path .Rprofile} to warn on partial matches:" )) ui_code_snippet(" options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE )") edit_r_profile("user") } usethis/R/rstudio.R0000644000176200001440000001561414651514262013770 0ustar liggesusers#' Add RStudio Project infrastructure #' #' It is likely that you want to use [create_project()] or [create_package()] #' instead of `use_rstudio()`! 
Both `create_*()` functions can add RStudio #' Project infrastructure to a pre-existing project or package. `use_rstudio()` #' is mostly for internal use or for those creating a usethis-like package for #' their organization. It does the following in the current project, often after #' executing `proj_set(..., force = TRUE)`: #' * Creates an `.Rproj` file #' * Adds RStudio files to `.gitignore` #' * Adds RStudio files to `.Rbuildignore`, if project is a package #' #' @param line_ending Line ending #' @param reformat If `TRUE`, the `.Rproj` is setup with common options that #' reformat files on save: adding a trailing newline, trimming trailing #' whitespace, and setting the line-ending. This is best practice for #' new projects. #' #' If `FALSE`, these options are left unset, which is more appropriate when #' you're contributing to someone else's project that does not have its own #' `.Rproj` file. #' @export use_rstudio <- function(line_ending = c("posix", "windows"), reformat = TRUE) { line_ending <- arg_match(line_ending) line_ending <- c("posix" = "Posix", "windows" = "Windows")[[line_ending]] rproj_file <- paste0(project_name(), ".Rproj") new <- use_template( "template.Rproj", save_as = rproj_file, data = list( line_ending = line_ending, is_pkg = is_package(), reformat = reformat ), ignore = is_package() ) use_git_ignore(".Rproj.user") if (is_package()) { use_build_ignore(".Rproj.user") } invisible(new) } #' Don't save/load user workspace between sessions #' #' R can save and reload the user's workspace between sessions via an `.RData` #' file in the current directory. However, long-term reproducibility is enhanced #' when you turn this feature off and clear R's memory at every restart. #' Starting with a blank slate provides timely feedback that encourages the #' development of scripts that are complete and self-contained. More detail can #' be found in the blog post [Project-oriented #' workflow](https://www.tidyverse.org/blog/2017/12/workflow-vs-script/). #' #' @inheritParams edit #' #' @export use_blank_slate <- function(scope = c("user", "project")) { scope <- match.arg(scope) if (scope == "user") { use_rstudio_preferences( save_workspace = "never", load_workspace = FALSE ) } else { rproj_fields <- modify_rproj( rproj_path(), list(RestoreWorkspace = "No", SaveWorkspace = "No") ) write_utf8(rproj_path(), serialize_rproj(rproj_fields)) restart_rstudio("Restart RStudio with a blank slate?") } invisible() } # Is base_path an RStudio Project or inside an RStudio Project? is_rstudio_project <- function(base_path = proj_get()) { length(rproj_paths(base_path)) == 1 } rproj_paths <- function(base_path, recurse = FALSE) { dir_ls(base_path, regexp = "[.]Rproj$", recurse = recurse) } # Return path to single .Rproj or die trying rproj_path <- function(base_path = proj_get(), call = caller_env()) { rproj <- rproj_paths(base_path) if (length(rproj) == 1) { rproj } else if (length(rproj) == 0) { name <- project_name(base_path) cli::cli_abort("{.val {name}} is not an RStudio Project.", call = call) } else { name <- project_name(base_path) cli::cli_abort( c( "{.val {name}} must contain a single .Rproj file.", i = "Found {.file {path_rel(rproj, base_path)}}." ), call = call ) } } # Is base_path open in RStudio? 
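# (Descriptive note: the helper below returns FALSE unless rstudioapi is
# available and RStudio's active project resolves to the same real path as
# base_path.)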
in_rstudio <- function(base_path = proj_get()) { if (!rstudio_available()) { return(FALSE) } if (!rstudioapi::hasFun("getActiveProject")) { return(FALSE) } proj <- rstudioapi::getActiveProject() if (is.null(proj)) { return(FALSE) } path_real(proj) == path_real(base_path) } # So we can override the default with a mock rstudio_available <- function() { rstudioapi::isAvailable() } in_rstudio_server <- function() { if (!rstudio_available()) { return(FALSE) } identical(rstudioapi::versionInfo()$mode, "server") } parse_rproj <- function(file) { lines <- as.list(read_utf8(file)) has_colon <- grepl(":", lines) fields <- lapply(lines[has_colon], function(x) strsplit(x, split = ": ")[[1]]) lines[has_colon] <- vapply(fields, `[[`, "character", 2) names(lines)[has_colon] <- vapply(fields, `[[`, "character", 1) names(lines)[!has_colon] <- "" lines } modify_rproj <- function(file, update) { utils::modifyList(parse_rproj(file), update) } serialize_rproj <- function(fields) { named <- nzchar(names(fields)) as.character(ifelse(named, paste0(names(fields), ": ", fields), fields)) } # Must be last command run restart_rstudio <- function(message = NULL) { if (!in_rstudio(proj_get())) { return(FALSE) } if (!is_interactive()) { return(FALSE) } if (!is.null(message)) { ui_bullets(message) } if (!rstudioapi::hasFun("openProject")) { return(FALSE) } if (ui_nah("Restart now?")) { return(FALSE) } rstudioapi::openProject(proj_get()) } rstudio_git_tickle <- function() { if (uses_git() && rstudioapi::hasFun("executeCommand")) { rstudioapi::executeCommand("vcsRefresh") } invisible() } rstudio_config_path <- function(...) { if (is_windows()) { # https://github.com/r-lib/usethis/issues/1293 base <- rappdirs::user_config_dir("RStudio", appauthor = NULL) } else { # RStudio only uses windows/unix conventions, not mac base <- rappdirs::user_config_dir("rstudio", os = "unix") } path(base, ...) } #' Set global RStudio preferences #' #' This function allows you to set global RStudio preferences, achieving the #' same effect programmatically as clicking buttons in RStudio's Global Options. #' You can find a list of configurable properties at #' . #' #' @export #' @param ... <[`dynamic-dots`][rlang::dyn-dots]> Property-value pairs. #' @return A named list of the previous values, invisibly. use_rstudio_preferences <- function(...) { new <- dots_list(..., .homonyms = "last") if (length(new) > 0 && !is_named(new)) { cli::cli_abort("All arguments in {.arg ...} must be named.") } json <- rstudio_prefs_read() old <- json[names(new)] for (name in names(new)) { val <- new[[name]] if (identical(json[[name]], val)) { next } ui_bullets(c("v" = "Setting RStudio preference {.field {name}} to {.val {val}}.")) json[[name]] <- val } rstudio_prefs_write(json) invisible(old) } rstudio_prefs_read <- function() { path <- rstudio_config_path("rstudio-prefs.json") if (file_exists(path)) { jsonlite::read_json(path) } else { list() } } rstudio_prefs_write <- function(json) { path <- rstudio_config_path("rstudio-prefs.json") create_directory(path_dir(path)) jsonlite::write_json(json, path, auto_unbox = TRUE, pretty = TRUE) } usethis/R/r.R0000644000176200001440000001526314717524721012544 0ustar liggesusers#' Create or edit R or test files #' #' This pair of functions makes it easy to create paired R and test files, #' using the convention that the tests for `R/foofy.R` should live #' in `tests/testthat/test-foofy.R`. 
You can use them to create new files #' from scratch by supplying `name`, or if you use RStudio, you can call #' to create (or navigate to) the companion file based on the currently open #' file. This also works when a test snapshot file is active, i.e. if you're #' looking at `tests/testthat/_snaps/foofy.md`, `use_r()` or `use_test()` take #' you to `R/foofy.R` or `tests/testthat/test-foofy.R`, respectively. #' #' @section Renaming files in an existing package: #' #' Here are some tips on aligning file names across `R/` and `tests/testthat/` #' in an existing package that did not necessarily follow this convention #' before. #' #' This script generates a data frame of `R/` and test files that can help you #' identify missed opportunities for pairing: #' #' ``` #' library(fs) #' library(tidyverse) #' #' bind_rows( #' tibble( #' type = "R", #' path = dir_ls("R/", regexp = "\\.[Rr]$"), #' name = as.character(path_ext_remove(path_file(path))), #' ), #' tibble( #' type = "test", #' path = dir_ls("tests/testthat/", regexp = "/test[^/]+\\.[Rr]$"), #' name = as.character(path_ext_remove(str_remove(path_file(path), "^test[-_]"))), #' ) #' ) %>% #' pivot_wider(names_from = type, values_from = path) %>% #' print(n = Inf) #' ``` #' #' The [rename_files()] function can also be helpful. #' #' @param name Either a string giving a file name (without directory) or #' `NULL` to take the name from the currently open file in RStudio. #' @inheritParams edit_file #' @seealso #' * The [testing](https://r-pkgs.org/testing-basics.html) and #' [R code](https://r-pkgs.org/code.html) chapters of #' [R Packages](https://r-pkgs.org). #' * [use_test_helper()] to create a testthat helper file. #' #' @export #' #' @examples #' \dontrun{ #' # create a new .R file below R/ #' use_r("coolstuff") #' #' # if `R/coolstuff.R` is active in a supported IDE, you can now do: #' use_test() #' #' # if `tests/testthat/test-coolstuff.R` is active in a supported IDE, you can #' # return to `R/coolstuff.R` with: #' use_r() #' } use_r <- function(name = NULL, open = rlang::is_interactive()) { use_directory("R") path <- path("R", compute_name(name)) edit_file(proj_path(path), open = open) invisible(TRUE) } #' @rdname use_r #' @export use_test <- function(name = NULL, open = rlang::is_interactive()) { if (!uses_testthat()) { use_testthat_impl() } path <- path("tests", "testthat", paste0("test-", compute_name(name))) if (!file_exists(path)) { use_template("test-example-2.1.R", save_as = path) } edit_file(proj_path(path), open = open) invisible(TRUE) } #' Create or edit a test helper file #' #' This function creates (or opens) a test helper file, typically #' `tests/testthat/helper.R`. Test helper files are executed at the #' beginning of every automated test run and are also executed by #' [`load_all()`][pkgload::load_all]. A helper file is a great place to #' define test helper functions for use throughout your test suite, such as #' a custom expectation. #' #' @param name Can be used to specify the optional "SLUG" in #' `tests/testthat/helper-SLUG.R`. #' @inheritParams edit_file #' @seealso #' * [use_test()] to create a test file. #' * The testthat vignette on special files #' `vignette("special-files", package = "testthat")`. 
#' @export #' #' @examples #' \dontrun{ #' use_test_helper() #' use_test_helper("mocks") #' } use_test_helper <- function(name = NULL, open = rlang::is_interactive()) { maybe_name(name) if (!uses_testthat()) { ui_abort(c( "x" = "Your package must use {.pkg testthat} to use a helper file.", "_" = "Call {.run usethis::use_testthat()} to set up {.pkg testthat}." )) } target_path <- proj_path( path("tests", "testthat", as_test_helper_file(name)) ) if (!file_exists(target_path)) { ui_bullets(c( "i" = "Test helper files are executed at the start of all automated test runs.", "i" = "{.run devtools::load_all()} also sources test helper files." )) } edit_file(target_path, open = open) invisible(TRUE) } # helpers ----------------------------------------------------------------- compute_name <- function(name = NULL, ext = "R", error_call = caller_env()) { if (!is.null(name)) { check_file_name(name, call = error_call) if (path_ext(name) == "") { name <- path_ext_set(name, ext) } else if (path_ext(name) != ext) { cli::cli_abort( "{.arg name} must have extension {.str {ext}}, not {.str {path_ext(name)}}.", call = error_call ) } return(as.character(name)) } if (!rstudio_available()) { cli::cli_abort( "{.arg name} is absent but must be specified.", call = error_call ) } compute_active_name( path = rstudioapi::getSourceEditorContext()$path, ext = ext, error_call = error_call ) } compute_active_name <- function(path, ext, error_call = caller_env()) { if (is.null(path)) { cli::cli_abort( c( "No file is open in RStudio.", i = "Please specify {.arg name}." ), call = error_call ) } ## rstudioapi can return a path like '~/path/to/file' where '~' means ## R's notion of user's home directory path <- proj_path_prep(path_expand_r(path)) dir <- path_dir(proj_rel_path(path)) if (!dir %in% c("R", "src", "tests/testthat", "tests/testthat/_snaps")) { cli::cli_abort("Open file must be code, test, or snapshot.", call = error_call) } file <- path_file(path) if (dir == "tests/testthat") { file <- gsub("^test[-_]", "", file) } as.character(path_ext_set(file, ext)) } check_file_name <- function(name, call = caller_env()) { if (!is_string(name)) { cli::cli_abort("{.arg name} must be a single string", call = call) } if (name == "") { cli::cli_abort("{.arg name} must not be an empty string", call = call) } if (path_dir(name) != ".") { cli::cli_abort( "{.arg name} must be a file name without directory.", call = call ) } if (!valid_file_name(path_ext_remove(name))) { cli::cli_abort( c( "{.arg name} ({.str {name}}) must be a valid file name.", i = "A valid file name consists of only ASCII letters, numbers, '-', and '_'." ), call = call ) } } valid_file_name <- function(x) { grepl("^[a-zA-Z0-9._-]+$", x) } as_test_helper_file <- function(name = NULL) { file <- name %||% "helper.R" if (!grepl("^helper", file)) { file <- glue("helper-{file}") } if (path_ext(file) == "") { file <- path_ext_set(file, "R") } unclass(file) } usethis/R/utils.R0000644000176200001440000000512714717524721013441 0ustar liggesuserscan_overwrite <- function(path) { if (!file_exists(path)) { return(TRUE) } if (getOption("usethis.overwrite", FALSE)) { # don't activate a project # don't assume `path` is in the active project if (is_in_proj(path) && uses_git()) { # path is in active project return(TRUE) } if (possibly_in_proj(path) && # path is some other project with_project(proj_find(path), uses_git(), quiet = TRUE)) { return(TRUE) } } if (is_interactive()) { ui_yep(c("!" 
= "Overwrite pre-existing file {.path {pth(path)}}?")) } else { FALSE } } check_is_named_list <- function(x, nm = deparse(substitute(x))) { if (!is_list(x)) { ui_abort("{.code {nm}} must be a list, not {.obj_type_friendly {x}}.") } if (!is_dictionaryish(x)) { ui_abort( "Names of {.code {nm}} must be non-missing, non-empty, and non-duplicated.") } x } dots <- function(...) { eval(substitute(alist(...))) } asciify <- function(x) { check_character(x) gsub("[^a-zA-Z0-9_-]+", "-", x) } compact <- function(x) { is_empty <- vapply(x, function(x) length(x) == 0, logical(1)) x[!is_empty] } # Needed for mocking is_installed <- function(pkg) { rlang::is_installed(pkg) } isFALSE <- function(x) { identical(x, FALSE) } isNA <- function(x) { length(x) == 1 && is.na(x) } path_first_existing <- function(paths) { # manual loop with explicit use of `[[` to retain "fs" class for (i in seq_along(paths)) { path <- paths[[i]] if (file_exists(path)) { return(path) } } NULL } is_online <- function(host) { bare_host <- sub("^https?://(.*)$", "\\1", host) !is.null(curl::nslookup(bare_host, error = FALSE)) } year <- function() format(Sys.Date(), "%Y") pluck_lgl <- function(.x, ...) { as.logical(purrr::pluck(.x, ..., .default = NA)) } pluck_chr <- function(.x, ...) { as.character(purrr::pluck(.x, ..., .default = NA)) } pluck_int <- function(.x, ...) { as.integer(purrr::pluck(.x, ..., .default = NA)) } is_windows <- function() { .Platform$OS.type == "windows" } is_linux <- function() { identical(tolower(Sys.info()[["sysname"]]), "linux") } # For stability of `stringsAsFactors` across versions data.frame <- function(..., stringsAsFactors = FALSE) { base::data.frame(..., stringsAsFactors = stringsAsFactors) } # wrapper around check_name() from import-standalone-types-check.R # for the common case when NULL is allowed (often default) maybe_name <- function(x, ..., arg = caller_arg(x), call = caller_env()) { check_name(x, ..., allow_null = TRUE, arg = arg, call = call) } usethis/R/latest-dependencies.R0000644000176200001440000000251214651000165016200 0ustar liggesusers#' Use "latest" versions of all dependencies #' #' Pins minimum versions of all `Imports` and `Depends` dependencies to latest #' ones (as determined by `source`). Useful for the tidyverse package, but #' should otherwise be used with extreme care. #' #' @keywords internal #' @export #' @param overwrite By default (`TRUE`), all dependencies will be modified. #' Set to `FALSE` to only modify dependencies without version #' specifications. #' @param source Use "CRAN" or "local" package versions. 
use_latest_dependencies <- function(overwrite = TRUE, source = c("CRAN", "local")) { source <- arg_match(source) desc <- proj_desc() updated <- update_versions( desc$get_deps(), overwrite = overwrite, source = source ) desc$set_deps(updated) desc$write() invisible(TRUE) } update_versions <- function(deps, overwrite = TRUE, source = c("CRAN", "local")) { baserec <- base_and_recommended() to_change <- !deps$package %in% c("R", baserec) & deps$type != "Suggests" if (!overwrite) { to_change <- to_change & deps$version == "*" } packages <- deps$package[to_change] versions <- switch(match.arg(source), local = map_chr(packages, ~ as.character(utils::packageVersion(.x))), CRAN = utils::available.packages()[packages, "Version"] ) deps$version[to_change] <- paste0(">= ", versions) deps } usethis/R/browse.R0000644000176200001440000001740314717524721013602 0ustar liggesusers#' Visit important project-related web pages #' #' These functions take you to various web pages associated with a project #' (often, an R package) and return the target URL(s) invisibly. To form #' these URLs we consult: #' * Git remotes configured for the active project that appear to be hosted on #' a GitHub deployment #' * DESCRIPTION file for the active project or the specified `package`. The #' DESCRIPTION file is sought first in the local package library and then #' on CRAN. #' * Fixed templates: #' - Circle CI: `https://circleci.com/gh/{OWNER}/{PACKAGE}` #' - CRAN landing page: `https://cran.r-project.org/package={PACKAGE}` #' - GitHub mirror of a CRAN package: `https://github.com/cran/{PACKAGE}` #' Templated URLs aren't checked for existence, so there is no guarantee #' there will be content at the destination. #' #' @details #' * `browse_package()`: Assembles a list of URLs and lets user choose one to #' visit in a web browser. In a non-interactive session, returns all #' discovered URLs. #' * `browse_project()`: Thin wrapper around `browse_package()` that always #' targets the active usethis project. #' * `browse_github()`: Visits a GitHub repository associated with the project. #' In the case of a fork, you might be asked to specify if you're interested #' in the source repo or your fork. #' * `browse_github_issues()`: Visits the GitHub Issues index or one specific #' issue. #' * `browse_github_pulls()`: Visits the GitHub Pull Request index or one #' specific pull request. #' * `browse_circleci()`: Visits the project's page on #' [Circle CI](https://circleci.com). #' * `browse_cran()`: Visits the package on CRAN, via the canonical URL. #' #' @param package Name of package. If `NULL`, the active project is targeted, #' regardless of whether it's an R package or not. #' @param number Optional, to specify an individual GitHub issue or pull #' request. Can be a number or `"new"`. 
#' #' @examples #' # works on the active project #' # browse_project() #' #' browse_package("httr") #' browse_github("gh") #' browse_github_issues("fs") #' browse_github_issues("fs", 1) #' browse_github_pulls("curl") #' browse_github_pulls("curl", 183) #' browse_cran("MASS") #' @name browse-this NULL #' @export #' @rdname browse-this browse_package <- function(package = NULL) { maybe_name(package) if (is.null(package)) { check_is_project() } urls <- character() details <- list() if (is.null(package) && uses_git()) { grl <- github_remote_list(these = NULL) ord <- c( which(grl$remote == "origin"), which(grl$remote == "upstream"), which(!grl$remote %in% c("origin", "upstream")) ) grl <- grl[ord, ] grl <- set_names(grl$url, nm = grl$remote) parsed <- parse_github_remotes(grl) urls <- c(urls, glue_data(parsed, "https://{host}/{repo_owner}/{repo_name}")) details <- c( details, map(parsed$name, ~ cli::cli_fmt(cli::cli_text("{.val {.x}} remote"))) ) } desc_urls_dat <- desc_urls(package, include_cran = TRUE) urls <- c(urls, desc_urls_dat$url) details <- c( details, map( desc_urls_dat$desc_field, ~ if (is.na(.x)) "CRAN" else cli::cli_fmt(cli::cli_text("{.field {.x}} field in DESCRIPTION")) ) ) if (length(urls) == 0) { ui_bullets(c(x = "Can't find any URLs.")) return(invisible(character())) } if (!is_interactive()) { return(invisible(urls)) } prompt <- "Which URL do you want to visit? (0 to exit)" pretty <- purrr::map2( format(urls, justify = "left"), details, ~ glue("{.x} ({.y})") ) choice <- utils::menu(title = prompt, choices = pretty) if (choice == 0) { return(invisible(character())) } view_url(urls[choice]) } #' @export #' @rdname browse-this browse_project <- function() browse_package(NULL) #' @export #' @rdname browse-this browse_github <- function(package = NULL) { view_url(github_url(package)) } #' @export #' @rdname browse-this browse_github_issues <- function(package = NULL, number = NULL) { view_url(github_url(package), "issues", number) } #' @export #' @rdname browse-this browse_github_pulls <- function(package = NULL, number = NULL) { pull <- if (is.null(number)) "pulls" else "pull" view_url(github_url(package), pull, number) } #' @export #' @rdname browse-this browse_github_actions <- function(package = NULL) { view_url(github_url(package), "actions") } #' @export #' @rdname browse-this browse_circleci <- function(package = NULL) { gh <- github_url(package) circle_url <- "circleci.com/gh" view_url(sub("github.com", circle_url, gh)) } #' @export #' @rdname browse-this browse_cran <- function(package = NULL) { view_url(cran_home(package)) } # Try to get a GitHub repo spec from these places: # 1. Remotes associated with GitHub (active project) # 2. BugReports/URL fields of DESCRIPTION (active project or arbitrary # installed package) github_url <- function(package = NULL) { maybe_name(package) if (is.null(package)) { check_is_project() url <- github_url_from_git_remotes() if (!is.null(url)) { return(url) } } desc_urls_dat <- desc_urls(package) if (is.null(desc_urls_dat)) { if (is.null(package)) { ui_abort(c( "Project {.val {project_name()}} has no DESCRIPTION file and has no GitHub remotes configured.", "No way to discover URLs." )) } else { ui_abort(c( "Can't find DESCRIPTION for package {.pkg {package}} locally or on CRAN.", "No way to discover URLs." 
)) } } desc_urls_dat <- desc_urls_dat[desc_urls_dat$is_github, ] if (nrow(desc_urls_dat) > 0) { parsed <- parse_github_remotes(desc_urls_dat$url[[1]]) return(glue_data_chr(parsed, "https://{host}/{repo_owner}/{repo_name}")) } if (is.null(package)) { ui_abort(" Project {.val {project_name()}} has no GitHub remotes configured and has no GitHub URLs in DESCRIPTION.") } cli::cli_warn(c( "!" = "Package {.val {package}} has no GitHub URLs in DESCRIPTION.", " " = "Trying the GitHub CRAN mirror.")) glue_chr("https://github.com/cran/{package}") } cran_home <- function(package = NULL) { package <- package %||% project_name() glue_chr("https://cran.r-project.org/package={package}") } # returns NULL, if no DESCRIPTION found # returns 0-row data frame, if DESCRIPTION holds no URLs # returns data frame, if successful # include_cran whether to include CRAN landing page, if we consult it desc_urls <- function(package = NULL, include_cran = FALSE, desc = NULL) { maybe_desc <- purrr::possibly(desc::desc, otherwise = NULL) desc_from_cran <- FALSE if (is.null(desc)) { if (is.null(package)) { desc <- maybe_desc(file = proj_get()) if (is.null(desc)) { return() } } else { desc <- maybe_desc(package = package) if (is.null(desc)) { cran_desc_url <- glue("https://cran.rstudio.com/web/packages/{package}/DESCRIPTION") suppressWarnings( desc <- maybe_desc(text = readLines(cran_desc_url)) ) if (is.null(desc)) { return() } desc_from_cran <- TRUE } } } url <- desc$get_urls() bug_reports <- desc$get_field("BugReports", default = character()) cran <- if (include_cran && desc_from_cran) cran_home(package) else character() dat <- data.frame( desc_field = c( rep_len("URL", length.out = length(url)), rep_len("BugReports", length.out = length(bug_reports)), rep_len(NA, length.out = length(cran)) ), url = c(url, bug_reports, cran), stringsAsFactors = FALSE ) dat <- cbind(dat, re_match(dat$url, github_remote_regex)) # TODO: could have a more sophisticated understanding of GitHub deployments dat$is_github <- !is.na(dat$.match) & grepl("github", dat$host) dat[c("url", "desc_field", "is_github")] } usethis/R/logo.R0000644000176200001440000000450214651514262013231 0ustar liggesusers#' Use a package logo #' #' This function helps you use a logo in your package: #' * Enforces a specific size #' * Stores logo image file at `man/figures/logo.png` #' * Produces the markdown text you need in README to include the logo #' #' @param img The path to an existing image file #' @param geometry a [magick::geometry] string specifying size. The default #' assumes that you have a hex logo using spec from #' . #' @param retina `TRUE`, the default, scales the image on the README, #' assuming that geometry is double the desired size. 
#' #' @examples #' \dontrun{ #' use_logo("usethis.png") #' } #' @export use_logo <- function(img, geometry = "240x278", retina = TRUE) { check_is_package("use_logo()") ext <- tolower(path_ext(img)) logo_path <- proj_path("man", "figures", "logo", ext = ext) create_directory(path_dir(logo_path)) if (!can_overwrite(logo_path)) { return(invisible(FALSE)) } if (ext == "svg") { logo_path <- path("man", "figures", "logo.svg") file_copy(img, proj_path(logo_path), overwrite = TRUE) ui_bullets(c("v" = "Copied {.path {pth(img)}} to {.path {logo_path}}.")) height <- as.integer(sub(".*x", "", geometry)) } else { check_installed("magick") img_data <- magick::image_read(img) img_data <- magick::image_resize(img_data, geometry) magick::image_write(img_data, logo_path) ui_bullets(c("v" = "Resized {.path {pth(img)}} to {geometry}.")) height <- magick::image_info(magick::image_read(logo_path))$height } pkg <- project_name() if (retina) { height <- round(height / 2) } # Have a clickable hyperlink to jump to README if exists. readme_path <- find_readme() if (is.null(readme_path)) { readme_show <- "your README" } else { readme_show <- cli::format_inline("{.path {pth(readme_path)}}") } ui_bullets(c("_" = "Add logo to {readme_show} with the following html:")) pd_link <- pkgdown_url(pedantic = TRUE) if (is.null(pd_link)) { ui_code_snippet( "# {pkg} \"\"", language = "" ) } else { ui_code_snippet( "# {pkg} \"{pkg}", language = "" ) } } usethis/R/ci.R0000644000176200001440000000567014651000165012663 0ustar liggesusers#' Continuous integration setup and badges #' #' @description #' `r lifecycle::badge("questioning")` #' #' These functions are not actively used by the tidyverse team, and may not #' currently work. Use at your own risk. #' #' Sets up third-party continuous integration (CI) services for an R package #' on GitLab or CircleCI. These functions: #' #' * Add service-specific configuration files and add them to `.Rbuildignore`. #' * Activate a service or give the user a detailed prompt. #' * Provide the markdown to insert a badge into README. #' #' @section `use_gitlab_ci()`: #' Adds a basic `.gitlab-ci.yml` to the top-level directory of a package. This #' is a configuration file for the [GitLab #' CI/CD](https://docs.gitlab.com/ee/ci/) continuous integration service. #' @export use_gitlab_ci <- function() { check_uses_git() new <- use_template( "gitlab-ci.yml", ".gitlab-ci.yml", ignore = TRUE ) if (!new) { return(invisible(FALSE)) } invisible(TRUE) } #' @section `use_circleci()`: #' Adds a basic `.circleci/config.yml` to the top-level directory of a package. #' This is a configuration file for the [CircleCI](https://circleci.com/) #' continuous integration service. #' @param browse Open a browser window to enable automatic builds for the #' package. #' @param image The Docker image to use for build. Must be available on #' [DockerHub](https://hub.docker.com). The #' [rocker/verse](https://hub.docker.com/r/rocker/verse) image includes #' TeXLive, pandoc, and the tidyverse packages. For a minimal image, try #' [rocker/r-ver](https://hub.docker.com/r/rocker/r-ver). To specify a version #' of R, change the tag from `latest` to the version you want, e.g. #' `rocker/r-ver:3.5.3`. 
#' @export #' @rdname use_gitlab_ci use_circleci <- function(browse = rlang::is_interactive(), image = "rocker/verse:latest") { repo_spec <- target_repo_spec() use_directory(".circleci", ignore = TRUE) new <- use_template( "circleci-config.yml", ".circleci/config.yml", data = list(package = project_name(), image = image), ignore = TRUE ) if (!new) { return(invisible(FALSE)) } use_circleci_badge(repo_spec) circleci_activate(spec_owner(repo_spec), browse) invisible(TRUE) } #' @section `use_circleci_badge()`: #' Only adds the [Circle CI](https://circleci.com/) badge. Use for a project #' where Circle CI is already configured. #' @rdname use_gitlab_ci #' @eval param_repo_spec() #' @export use_circleci_badge <- function(repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() url <- glue("https://circleci.com/gh/{repo_spec}") img <- glue("{url}.svg?style=svg") use_badge("CircleCI build status", url, img) } circleci_activate <- function(owner, browse = is_interactive()) { url <- glue("https://circleci.com/add-projects/gh/{owner}") ui_bullets(c( "_" = "Turn on CircleCI for your repo at {.url {url}}." )) if (browse) { utils::browseURL(url) } } usethis/R/github.R0000644000176200001440000002147514717524762013574 0ustar liggesusers#' Connect a local repo with GitHub #' #' @description #' `use_github()` takes a local project and: #' * Checks that the initial state is good to go: #' - Project is already a Git repo #' - Current branch is the default branch, e.g. `main` or `master` #' - No uncommitted changes #' - No pre-existing `origin` remote #' * Creates an associated repo on GitHub #' * Adds that GitHub repo to your local repo as the `origin` remote #' * Makes an initial push to GitHub #' * Calls [use_github_links()], if the project is an R package #' * Configures `origin/DEFAULT` to be the upstream branch of the local #' `DEFAULT` branch, e.g. `main` or `master` #' #' See below for the authentication setup that is necessary for all of this to #' work. #' #' @template double-auth #' #' @param organisation If supplied, the repo will be created under this #' organisation, instead of the login associated with the GitHub token #' discovered for this `host`. The user's role and the token's scopes must be #' such that you have permission to create repositories in this #' `organisation`. #' @param private If `TRUE`, creates a private repository. #' @param visibility Only relevant for organisation-owned repos associated with #' certain GitHub Enterprise products. The special "internal" `visibility` #' grants read permission to all organisation members, i.e. it's intermediate #' between "private" and "public", within GHE. When specified, `visibility` #' takes precedence over `private = TRUE/FALSE`. #' @inheritParams git_protocol #' @param host GitHub host to target, passed to the `.api_url` argument of #' [gh::gh()]. If unspecified, gh defaults to "https://api.github.com", #' although gh's default can be customised by setting the GITHUB_API_URL #' environment variable. #' #' For a hypothetical GitHub Enterprise instance, either #' "https://github.acme.com/api/v3" or "https://github.acme.com" is #' acceptable. 
#' #' @export #' @examples #' \dontrun{ #' pkgpath <- file.path(tempdir(), "testpkg") #' create_package(pkgpath) #' #' ## now, working inside "testpkg", initialize git repository #' use_git() #' #' ## create github repository and configure as git remote #' use_github() #' } use_github <- function(organisation = NULL, private = FALSE, visibility = c("public", "private", "internal"), protocol = git_protocol(), host = NULL) { visibility_specified <- !missing(visibility) visibility <- match.arg(visibility) check_protocol(protocol) check_uses_git() default_branch <- guess_local_default_branch() check_current_branch( is = default_branch, # glue-ing happens inside check_current_branch(), where `gb` gives the # current branch message = c("x" = "Must be on the default branch {.val {is}}, not {.val {gb}}.") ) challenge_uncommitted_changes(msg = " There are uncommitted changes and we're about to create and push to a new \\ GitHub repo") check_no_origin() if (is.null(organisation)) { if (visibility_specified) { ui_abort(" The {.arg visibility} setting is only relevant for organisation-owned repos, within the context of certain GitHub Enterprise products.") } visibility <- if (private) "private" else "public" } if (!is.null(organisation) && !visibility_specified) { visibility <- if (private) "private" else "public" } whoami <- suppressMessages(gh::gh_whoami(.api_url = host)) if (is.null(whoami)) { ui_abort(c( "x" = "Unable to discover a GitHub personal access token.", "i" = "A token is required in order to create and push to a new repo.", "_" = "Call {.run usethis::gh_token_help()} for help configuring a token." )) } empirical_host <- parse_github_remotes(glue("{whoami$html_url}/REPO"))$host if (empirical_host != "github.com") { ui_bullets(c("i" = "Targeting the GitHub host {.val {empirical_host}}.")) } owner <- organisation %||% whoami$login repo_name <- project_name() check_no_github_repo(owner, repo_name, host) repo_desc <- if (is_package()) proj_desc()$get_field("Title") %||% "" else "" repo_desc <- gsub("\n", " ", repo_desc) repo_spec <- glue("{owner}/{repo_name}") visibility_string <- if (visibility == "public") "" else glue("{visibility} ") ui_bullets(c( "v" = "Creating {visibility_string}GitHub repository {.val {repo_spec}}." 
)) if (is.null(organisation)) { create <- gh::gh( "POST /user/repos", name = repo_name, description = repo_desc, private = private, .api_url = host ) } else { create <- gh::gh( "POST /orgs/{org}/repos", org = organisation, name = repo_name, description = repo_desc, visibility = visibility, # this is necessary to set `visibility` in GHE 2.22 (but not in 3.2) # hopefully it's harmless when not needed .accept = "application/vnd.github.nebula-preview+json", .api_url = host ) } origin_url <- switch( protocol, https = create$clone_url, ssh = create$ssh_url ) withr::defer(view_url(create$html_url)) ui_bullets(c("v" = "Setting remote {.val origin} to {.val {origin_url}}.")) use_git_remote("origin", origin_url) if (is_package()) { # we tryCatch(), because we can't afford any failure here to result in not # doing the first push and configuring the default branch # such an incomplete setup is hard to diagnose / repair post hoc tryCatch( use_github_links(), error = function(e) NULL ) } git_push_first(default_branch, "origin") repo <- git_repo() gbl <- gert::git_branch_list(local = TRUE, repo = repo) if (nrow(gbl) > 1) { ui_bullets(c("v" = "Setting {.val {default_branch}} as default branch on GitHub.")) gh::gh( "PATCH /repos/{owner}/{repo}", owner = owner, repo = repo_name, default_branch = default_branch, .api_url = host ) } invisible() } #' Use GitHub links in URL and BugReports #' #' @description #' Populates the `URL` and `BugReports` fields of a GitHub-using R package with #' appropriate links. The GitHub repo to link to is determined from the current #' project's GitHub remotes: #' * If we are not working with a fork, this function expects `origin` to be a #' GitHub remote and the links target that repo. #' * If we are working in a fork, this function expects to find two GitHub #' remotes: `origin` (the fork) and `upstream` (the fork's parent) remote. In #' an interactive session, the user can confirm which repo to use for the #' links. In a noninteractive session, links are formed using `upstream`. #' #' @param overwrite By default, `use_github_links()` will not overwrite existing #' fields. Set to `TRUE` to overwrite existing links. 
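#'
#' @details
#' For example, after a repo has been transferred to a new owner, you might
#' refresh stale links like so (a sketch; assumes the Git remotes already point
#' at the new repo):
#'
#' ```r
#' use_github_links(overwrite = TRUE)
#' ```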
#' @export #' @examples #' \dontrun{ #' use_github_links() #' } #' use_github_links <- function(overwrite = FALSE) { check_is_package("use_github_links()") gh_url <- github_url_from_git_remotes() proj_desc_field_update("URL", gh_url, overwrite = overwrite, append = TRUE) proj_desc_field_update( "BugReports", glue("{gh_url}/issues"), overwrite = overwrite ) git_ask_commit( "Add GitHub links to DESCRIPTION", untracked = TRUE, paths = "DESCRIPTION" ) invisible() } has_github_links <- function(target_repo = NULL) { url <- if (is.null(target_repo)) NULL else target_repo$url github_url <- github_url_from_git_remotes(url) if (is.null(github_url)) { return(FALSE) } desc <- proj_desc() has_github_url <- github_url %in% desc$get_urls() bug_reports <- desc$get_field("BugReports", default = character()) has_github_issues <- glue("{github_url}/issues") %in% bug_reports has_github_url && has_github_issues } check_no_origin <- function() { remotes <- git_remotes() if ("origin" %in% names(remotes)) { ui_abort(c( "x" = "This repo already has an {.val origin} remote, with value {.val {remotes[['origin']]}}.", "i" = "You can remove this setting with:", " " = '{.code usethis::use_git_remote("origin", url = NULL, overwrite = TRUE)}' )) } invisible() } check_no_github_repo <- function(owner, repo, host) { spec <- glue("{owner}/{repo}") repo_found <- tryCatch( { repo_info <- gh::gh("/repos/{spec}", spec = spec, .api_url = host) # when does repo_info$full_name != the spec we sent? # this happens if you reuse the original name of a repo that has since # been renamed # there's no 404, because of the automatic redirect, but you CAN create # a new repo with this name # https://github.com/r-lib/usethis/issues/1893 repo_info$full_name == spec }, "http_error_404" = function(err) FALSE ) if (!repo_found) { return(invisible()) } empirical_host <- parse_github_remotes(repo_info$html_url)$host ui_abort("Repo {.val {spec}} already exists on {.val {empirical_host}}.") } usethis/R/directory.R0000644000176200001440000000271514651000165014271 0ustar liggesusers#' Use a directory #' #' `use_directory()` creates a directory (if it does not already exist) in the #' project's top-level directory. This function powers many of the other `use_` #' functions such as [use_data()] and [use_vignette()]. #' #' @param path Path of the directory to create, relative to the project. 
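#'
#' @details
#' For instance, [use_data_raw()] relies on this helper roughly like so, also
#' adding the new directory to `.Rbuildignore`:
#'
#' ```r
#' use_directory("data-raw", ignore = TRUE)
#' ```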
#' @inheritParams use_template #' #' @export #' @examples #' \dontrun{ #' use_directory("inst") #' } use_directory <- function(path, ignore = FALSE) { create_directory(proj_path(path)) if (ignore) { use_build_ignore(path) } invisible(TRUE) } create_directory <- function(path) { if (dir_exists(path)) { return(invisible(FALSE)) } else if (file_exists(path)) { ui_abort("{.path {pth(path)}} exists but is not a directory.") } dir_create(path, recurse = TRUE) ui_bullets(c("v" = "Creating {.path {pth(path)}}.")) invisible(TRUE) } check_path_is_directory <- function(path) { if (!file_exists(path)) { ui_abort("Directory {.path {pth(path)}} does not exist.") } if (is_link(path)) { path <- link_path(path) } if (!is_dir(path)) { ui_abort("{.path {pth(path)}} is not a directory.") } } count_directory_files <- function(x) { length(dir_ls(x)) } directory_has_files <- function(x) { count_directory_files(x) >= 1 } check_directory_is_empty <- function(x) { if (directory_has_files(x)) { ui_abort("{.path {pth(x)}} exists and is not an empty directory.") } invisible(x) } usethis/R/use-compat-file.R0000644000176200001440000000000014651000165015240 0ustar liggesusersusethis/R/code-of-conduct.R0000644000176200001440000000437314651000165015240 0ustar liggesusers#' Add a code of conduct #' #' Adds a `CODE_OF_CONDUCT.md` file to the active project and lists in #' `.Rbuildignore`, in the case of a package. The goal of a code of conduct is #' to foster an environment of inclusiveness, and to explicitly discourage #' inappropriate behaviour. The template comes from #' , version 2.1: #' . #' #' If your package is going to CRAN, the link to the CoC in your README must #' be an absolute link to a rendered website as `CODE_OF_CONDUCT.md` is not #' included in the package sent to CRAN. `use_code_of_conduct()` will #' automatically generate this link if (1) you use pkgdown and (2) have set the #' `url` field in `_pkgdown.yml`; otherwise it will link to a copy of the CoC #' on . #' #' @param contact Contact details for making a code of conduct report. #' Usually an email address. #' @param path Path of the directory to put `CODE_OF_CONDUCT.md` in, relative to #' the active project. Passed along to [use_directory()]. Default is to locate #' at top-level, but `.github/` is also common. #' #' @export use_code_of_conduct <- function(contact, path = NULL) { if (missing(contact)) { ui_abort(" {.fun use_code_of_conduct} requires contact details in first argument.") } new <- use_coc(contact = contact, path = path) href <- pkgdown_url(pedantic = TRUE) %||% "https://contributor-covenant.org/version/2/1" href <- sub("/$", "", href) href <- paste0(href, "/CODE_OF_CONDUCT.html") ui_bullets(c( "_" = "You may also want to describe the code of conduct in your README:" )) ui_code_snippet(" ## Code of Conduct Please note that the {project_name()} project is released with a \\ [Contributor Code of Conduct]({href}). 
By contributing to this project, \\ you agree to abide by its terms.", language = "" ) invisible(new) } use_coc <- function(contact, path = NULL) { if (!is.null(path)) { use_directory(path, ignore = is_package()) } save_as <- path_join(c(path, "CODE_OF_CONDUCT.md")) use_template( "CODE_OF_CONDUCT.md", save_as = save_as, data = list(contact = contact), ignore = is_package() && is.null(path) ) } usethis/R/utils-rematch2.R0000644000176200001440000000205514651000165015125 0ustar liggesusers# inlined from # https://github.com/r-lib/rematch2/commit/aab858e3411810fa107d20db6f936c6b10cbdf3f # EXCEPT I don't return a tibble re_match <- function(text, pattern, perl = TRUE, ...) { check_string(pattern) text <- as.character(text) match <- regexpr(pattern, text, perl = perl, ...) start <- as.vector(match) length <- attr(match, "match.length") end <- start + length - 1L matchstr <- substring(text, start, end) matchstr[ start == -1 ] <- NA_character_ res <- data.frame( stringsAsFactors = FALSE, .text = text, .match = matchstr ) if (!is.null(attr(match, "capture.start"))) { gstart <- attr(match, "capture.start") glength <- attr(match, "capture.length") gend <- gstart + glength - 1L groupstr <- substring(text, gstart, gend) groupstr[ gstart == -1 ] <- NA_character_ dim(groupstr) <- dim(gstart) res <- cbind(groupstr, res, stringsAsFactors = FALSE) } names(res) <- c(attr(match, "capture.names"), ".text", ".match") #class(res) <- c("tbl_df", "tbl", class(res)) res } usethis/R/news.R0000644000176200001440000000312214651514262013242 0ustar liggesusers#' Create a simple `NEWS.md` #' #' This creates a basic `NEWS.md` in the root directory. #' #' @inheritParams use_template #' @seealso The [other markdown files #' section](https://r-pkgs.org/other-markdown.html) of [R #' Packages](https://r-pkgs.org). #' @export use_news_md <- function(open = rlang::is_interactive()) { check_is_package("use_news_md()") version <- if (is_dev_version()) "(development version)" else proj_version() on_cran <- !is.null(cran_version()) if (on_cran) { init_bullet <- "Added a `NEWS.md` file to track changes to the package." } else { init_bullet <- "Initial CRAN submission." } use_template( "NEWS.md", data = list( Package = project_name(), Version = version, InitialBullet = init_bullet ), open = open ) git_ask_commit("Add NEWS.md", untracked = TRUE, paths = "NEWS.md") } use_news_heading <- function(version) { news_path <- proj_path("NEWS.md") if (!file_exists(news_path)) { return(invisible()) } news <- read_utf8(news_path) idx <- match(TRUE, grepl("[^[:space:]]", news)) if (is.na(idx)) { return(news) } title <- glue("# {project_name()} {version}") if (title == news[[idx]]) { return(invisible()) } development_title <- glue("# {project_name()} (development version)") if (development_title == news[[idx]]) { news[[idx]] <- title ui_bullets(c("v" = "Replacing development heading in {.path NEWS.md}.")) return(write_utf8(news_path, news)) } ui_bullets(c("v" = "Adding new heading to {.path NEWS.md}.")) write_utf8(news_path, c(title, "", news)) } usethis/R/license.R0000644000176200001440000001403014651000165013700 0ustar liggesusers#' License a package #' #' @description #' Adds the necessary infrastructure to declare your package as licensed #' with one of these popular open source licenses: #' #' Permissive: #' * [MIT](https://choosealicense.com/licenses/mit/): simple and permissive. #' * [Apache 2.0](https://choosealicense.com/licenses/apache-2.0/): MIT + #' provides patent protection. 
#' #' Copyleft: #' * [GPL v2](https://choosealicense.com/licenses/gpl-2.0/): requires sharing #' of improvements. #' * [GPL v3](https://choosealicense.com/licenses/gpl-3.0/): requires sharing #' of improvements. #' * [AGPL v3](https://choosealicense.com/licenses/agpl-3.0/): requires sharing #' of improvements. #' * [LGPL v2.1](https://choosealicense.com/licenses/lgpl-2.1/): requires sharing #' of improvements. #' * [LGPL v3](https://choosealicense.com/licenses/lgpl-3.0/): requires sharing #' of improvements. #' #' Creative Commons licenses appropriate for data packages: #' * [CC0](https://creativecommons.org/publicdomain/zero/1.0/): dedicated #' to public domain. #' * [CC-BY](https://creativecommons.org/licenses/by/4.0/): Free to share and #' adapt, must give appropriate credit. #' #' See <https://choosealicense.com> for more details and other options. #' #' Alternatively, for code that you don't want to share with others, #' `use_proprietary_license()` makes it clear that all rights are reserved, #' and the code is not open source. #' #' @details #' CRAN does not permit you to include copies of standard licenses in your #' package, so these functions save the license as `LICENSE.md` and add it #' to `.Rbuildignore`. #' #' @name licenses #' @param copyright_holder Name of the copyright holder or holders. This #' defaults to `"{package name} authors"`; you should only change this if you #' use a CLA to assign copyright to a single entity. #' @param version License version. This defaults to the latest version for all licenses. #' @param include_future If `TRUE`, will license your package under the current #' and any potential future versions of the license. This is generally #' considered to be good practice because it means your package will #' automatically include "bug" fixes in licenses. #' @seealso For more details, refer to the #' [license chapter](https://r-pkgs.org/license.html) in _R Packages_.
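#'
#' @examples
#' \dontrun{
#' # Illustrative sketches only; each call updates DESCRIPTION and writes
#' # LICENSE / LICENSE.md in the active package. "ACME Ltd" is a placeholder.
#' use_mit_license()
#' use_gpl_license(version = 3, include_future = TRUE)
#' use_proprietary_license("ACME Ltd")
#' }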
#' @aliases NULL NULL #' @rdname licenses #' @export use_mit_license <- function(copyright_holder = NULL) { data <- list( year = format(Sys.Date(), "%Y"), copyright_holder = copyright_holder %||% glue("{project_name()} authors") ) if (is_package()) { proj_desc_field_update("License", "MIT + file LICENSE", overwrite = TRUE) use_template("year-copyright.txt", save_as = "LICENSE", data = data) } use_license_template("mit", data) } #' @rdname licenses #' @export use_gpl_license <- function(version = 3, include_future = TRUE) { version <- check_license_version(version, 2:3) if (is_package()) { abbr <- license_abbr("GPL", version, include_future) proj_desc_field_update("License", abbr, overwrite = TRUE) } use_license_template(glue("GPL-{version}")) } #' @rdname licenses #' @export use_agpl_license <- function(version = 3, include_future = TRUE) { version <- check_license_version(version, 3) if (is_package()) { abbr <- license_abbr("AGPL", version, include_future) proj_desc_field_update("License", abbr, overwrite = TRUE) } use_license_template(glue("AGPL-{version}")) } #' @rdname licenses #' @export use_lgpl_license <- function(version = 3, include_future = TRUE) { version <- check_license_version(version, c(2.1, 3)) if (is_package()) { abbr <- license_abbr("LGPL", version, include_future) proj_desc_field_update("License", abbr, overwrite = TRUE) } use_license_template(glue("LGPL-{version}")) } #' @rdname licenses #' @export use_apache_license <- function(version = 2, include_future = TRUE) { version <- check_license_version(version, 2) if (is_package()) { abbr <- license_abbr("Apache License", version, include_future) proj_desc_field_update("License", abbr, overwrite = TRUE) } use_license_template(glue("apache-{version}")) } #' @rdname licenses #' @export use_cc0_license <- function() { if (is_package()) { proj_desc_field_update("License", "CC0", overwrite = TRUE) } use_license_template("cc0") } #' @rdname licenses #' @export use_ccby_license <- function() { if (is_package()) { proj_desc_field_update("License", "CC BY 4.0", overwrite = TRUE) } use_license_template("ccby-4") } #' @rdname licenses #' @export use_proprietary_license <- function(copyright_holder) { data <- list( year = year(), copyright_holder = copyright_holder ) if (is_package()) { proj_desc_field_update("License", "file LICENSE", overwrite = TRUE) } use_template("license-proprietary.txt", save_as = "LICENSE", data = data) } # Fallbacks --------------------------------------------------------------- #' @rdname licenses #' @export #' @usage NULL use_gpl3_license <- function() { use_gpl_license(3) } #' @rdname licenses #' @export #' @usage NULL use_agpl3_license <- function() { use_agpl_license(3) } #' @rdname licenses #' @export #' @usage NULL use_apl2_license <- function() { use_apache_license(2) } # Helpers ----------------------------------------------------------------- use_license_template <- function(license, data = list()) { license_template <- glue("license-{license}.md") use_template(license_template, save_as = "LICENSE.md", data = data, ignore = TRUE ) } check_license_version <- function(version, possible) { version <- as.double(version) if (!version %in% possible) { ui_abort("{.arg version} must be {.or {possible}}.") } version } license_abbr <- function(name, version, include_future) { if (include_future) { glue_chr("{name} (>= {version})") } else { if (name %in% c("GPL", "LGPL", "AGPL")) { # Standard abbreviations listed at # https://cran.rstudio.com/doc/manuals/r-devel/R-exts.html#Licensing 
glue_chr("{name}-{version}") } else { glue_chr("{name} (== {version})") } } } usethis/R/ignore.R0000644000176200001440000000211714651000165013544 0ustar liggesusers#' Add files to `.Rbuildignore` #' #' @description #' `.Rbuildignore` has a regular expression on each line, but it's #' usually easier to work with specific file names. By default, #' `use_build_ignore()` will (crudely) turn a filename into a regular #' expression that will only match that path. Repeated entries will be #' silently removed. #' #' `use_build_ignore()` is designed to ignore *individual* files. If you #' want to ignore *all* files with a given extension, consider providing #' an "as-is" regular expression, using `escape = FALSE`; see examples. #' #' @param files Character vector of path names. #' @param escape If `TRUE`, the default, will escape `.` to #' `\\.` and surround with `^` and `$`. #' @export #' @examples #' \dontrun{ #' # ignore all Excel files #' use_build_ignore("[.]xlsx$", escape = FALSE) #' } use_build_ignore <- function(files, escape = TRUE) { if (escape) { files <- escape_path(files) } write_union(proj_path(".Rbuildignore"), files) } escape_path <- function(x) { x <- gsub("\\.", "\\\\.", x) x <- gsub("/$", "", x) paste0("^", x, "$") } usethis/R/usethis-deprecated.R0000644000176200001440000001112614717524721016057 0ustar liggesusers#' Deprecated Git functions #' #' @description #' `r lifecycle::badge("deprecated")` #' #' `git_branch_default()` has been replaced by [git_default_branch()]. #' #' @keywords internal #' @export git_branch_default <- function() { lifecycle::deprecate_warn("2.1.0", "git_branch_default()", "git_default_branch()") git_default_branch() } #' Deprecated badge function #' #' @description #' `r lifecycle::badge("deprecated")` #' #' `use_rscloud_badge()` has been replaced by [use_posit_cloud_badge()]. #' #' @keywords internal #' @export use_rscloud_badge <- function(url) { lifecycle::deprecate_stop( "2.2.0", "use_rscloud_badge()", "use_posit_cloud_badge()" ) } #' Deprecated tidyverse functions #' #' @description #' `r lifecycle::badge("deprecated")` #' #' * `use_tidy_eval()` is deprecated because there's no longer a need to #' systematically import and re-export a large number of functions in order #' to use tidy evaluation. Instead, use [use_import_from()] to tactically #' import functions as you need them. #' @keywords internal #' @export use_tidy_eval <- function() { lifecycle::deprecate_stop( "2.2.0", "use_tidy_eval()", details = c( "There is no longer a need to systematically import and/or re-export functions", "Instead import functions as needed, with e.g.:", 'usethis::use_import_from("rlang", c(".data", ".env"))' ) ) } # GitHub actions -------------------------------------------------------------- #' Deprecated GitHub Actions functions #' #' @description #' `r lifecycle::badge("deprecated")` #' #' * `use_github_actions()` is deprecated because it was just an alias #' for [use_github_action_check_release()]. #' #' * `use_github_action_check_full()` is overkill for most packages and is #' not recommended. #' #' * `use_github_action_check_release()`, `use_github_action_check_standard()`, #' and `use_github_action_pr_commands()` are deprecated in favor of #' [use_github_action()], which can now suggest specific workflows to use. 
#' #' @export #' @keywords internal use_github_actions <- function() { lifecycle::deprecate_warn( when = "2.2.0", what = "use_github_actions()", with = "use_github_action('check-release')" ) use_github_action('check-release') } #' @rdname use_github_actions #' @export use_github_action_check_release <- function(save_as = "R-CMD-check.yaml", ref = NULL, ignore = TRUE, open = FALSE) { lifecycle::deprecate_warn( when = "2.2.0", what = "use_github_action_check_release()", with = "use_github_action('check-release')" ) use_github_action( "check-release.yaml", ref = ref, save_as = save_as, ignore = ignore, open = open ) use_github_actions_badge(save_as) } #' @rdname use_github_actions #' @export use_github_action_check_standard <- function(save_as = "R-CMD-check.yaml", ref = NULL, ignore = TRUE, open = FALSE) { lifecycle::deprecate_warn( when = "2.2.0", what = "use_github_action_check_standard()", with = "use_github_action('check-standard')" ) use_github_action( "check-standard.yaml", ref = ref, save_as = save_as, ignore = ignore, open = open ) use_github_actions_badge(save_as) } #' @rdname use_github_actions #' @export use_github_action_pr_commands <- function(save_as = "pr-commands.yaml", ref = NULL, ignore = TRUE, open = FALSE) { lifecycle::deprecate_warn( when = "2.2.0", what = "use_github_action_pr_commands()", with = "use_github_action('pr-commands')" ) use_github_action( "pr-commands.yaml", ref = ref, save_as = save_as, ignore = ignore, open = open ) } #' @rdname use_github_actions #' @export use_github_action_check_full <- function(save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE, repo_spec = NULL) { details <- glue(" It is overkill for the vast majority of R packages. The \"check-full\" workflow is among those configured by \\ `use_tidy_github_actions()`. If you really want it, request it by name with `use_github_action()`.") lifecycle::deprecate_stop( "2.1.0", "use_github_action_check_full()", details = details ) } usethis/R/rename-files.R0000644000176200001440000000725114717524721014650 0ustar liggesusers#' Automatically rename paired `R/` and `test/` files #' #' @description #' * Moves `R/{old}.R` to `R/{new}.R` #' * Moves `src/{old}.*` to `src/{new}.*` #' * Moves `tests/testthat/test-{old}.R` to `tests/testthat/test-{new}.R` #' * Moves `tests/testthat/test-{old}-*.*` to `tests/testthat/test-{new}-*.*` #' and updates paths in the test file. #' * Removes `context()` calls from the test file, which are unnecessary #' (and discouraged) as of testthat v2.1.0. #' #' This is a potentially dangerous operation, so you must be using Git in #' order to use this function. #' #' @param old,new Old and new file names (with or without `.R` extensions). #' @export rename_files <- function(old, new) { check_uses_git() challenge_uncommitted_changes( msg = " There are uncommitted changes and we're about to bulk-rename files. It is \\ highly recommended to get into a clean Git state before bulk-editing files", untracked = TRUE ) old <- sub("\\.R$", "", old) new <- sub("\\.R$", "", new) # R/ ------------------------------------------------------------------------ r_old_path <- proj_path("R", old, ext = "R") r_new_path <- proj_path("R", new, ext = "R") if (file_exists(r_old_path)) { ui_bullets(c( "v" = "Moving {.path {pth(r_old_path)}} to {.path {pth(r_new_path)}}." 
)) file_move(r_old_path, r_new_path) } # src/ ------------------------------------------------------------------------ if (dir_exists(proj_path("src"))) { src_old <- dir_ls(proj_path("src"), glob = glue("*/src/{old}.*")) src_new_file <- gsub(glue("^{old}"), glue("{new}"), path_file(src_old)) src_new <- path(path_dir(src_old), src_new_file) if (length(src_old) > 1) { ui_bullets(c( "v" = "Moving {.path {pth(src_old)}} to {.path {pth(src_new)}}." )) file_move(src_old, src_new) } } # tests/testthat/ ------------------------------------------------------------ if (!uses_testthat()) { return(invisible()) } rename_test <- function(path) { file <- gsub(glue("^test-{old}"), glue("test-{new}"), path_file(path)) file <- gsub(glue("^{old}.md"), glue("{new}.md"), file) path(path_dir(path), file) } old_test <- dir_ls( proj_path("tests", "testthat"), glob = glue("*/test-{old}*") ) new_test <- rename_test(old_test) if (length(old_test) > 0) { ui_bullets(c( "v" = "Moving {.path {pth(old_test)}} to {.path {pth(new_test)}}." )) file_move(old_test, new_test) } snaps_dir <- proj_path("tests", "testthat", "_snaps") if (dir_exists(snaps_dir)) { old_snaps <- dir_ls(snaps_dir, glob = glue("*/{old}.md")) if (length(old_snaps) > 0) { new_snaps <- rename_test(old_snaps) ui_bullets(c( "v" = "Moving {.path {pth(old_snaps)}} to {.path {pth(new_snaps)}}." )) file_move(old_snaps, new_snaps) } } # tests/testthat/test-{new}.R ------------------------------------------------ test_path <- proj_path("tests", "testthat", glue("test-{new}"), ext = "R") if (!file_exists(test_path)) { return(invisible()) } lines <- read_utf8(test_path) # Remove old context lines context <- grepl("context\\(.*\\)", lines) if (any(context)) { ui_bullets(c("v" = "Removing call to {.fun context}.")) lines <- lines[!context] if (lines[[1]] == "") { lines <- lines[-1] } } old_test <- old_test[new_test != test_path] new_test <- new_test[new_test != test_path] if (length(old_test) > 0) { ui_bullets(c("v" = "Updating paths in {.path {pth(test_path)}}.")) for (i in seq_along(old_test)) { lines <- gsub(path_file(old_test[[i]]), path_file(new_test[[i]]), lines, fixed = TRUE) } } write_utf8(test_path, lines) } usethis/R/readme.R0000644000176200001440000000612614651000165013522 0ustar liggesusers#' Create README files #' #' @description #' Creates skeleton README files with possible stubs for #' * a high-level description of the project/package and its goals #' * R code to install from GitHub, if GitHub usage detected #' * a basic example #' #' Use `Rmd` if you want a rich intermingling of code and output. Use `md` for a #' basic README. `README.Rmd` will be automatically added to `.Rbuildignore`. #' The resulting README is populated with default YAML frontmatter and R fenced #' code blocks (`md`) or chunks (`Rmd`). #' #' If you use `Rmd`, you'll still need to render it regularly, to keep #' `README.md` up-to-date. `devtools::build_readme()` is handy for this. You #' could also use GitHub Actions to re-render `README.Rmd` every time you push. #' An example workflow can be found in the `examples/` directory here: #' . #' #' If the current project is a Git repo, then `use_readme_rmd()` automatically #' configures a pre-commit hook that helps keep `README.Rmd` and `README.md`, #' synchronized. The hook creates friction if you try to commit when #' `README.Rmd` has been edited more recently than `README.md`. If this hook #' causes more problems than it solves for you, it is implemented in #' `.git/hooks/pre-commit`, which you can modify or even delete. 
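#'
#' @details
#' A sketch of the typical edit-and-render loop (assumes devtools is
#' installed):
#'
#' ```r
#' use_readme_rmd()
#' # ... edit README.Rmd ...
#' devtools::build_readme()
#' ```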
#' #' @inheritParams use_template #' @seealso The [other markdown files #' section](https://r-pkgs.org/other-markdown.html) of [R #' Packages](https://r-pkgs.org). #' @export #' @examples #' \dontrun{ #' use_readme_rmd() #' use_readme_md() #' } use_readme_rmd <- function(open = rlang::is_interactive()) { check_is_project() check_installed("rmarkdown") is_pkg <- is_package() repo_spec <- tryCatch(target_repo_spec(ask = FALSE), error = function(e) NULL) nm <- if (is_pkg) "Package" else "Project" data <- list2( !!nm := project_name(), Rmd = TRUE, on_github = !is.null(repo_spec), github_spec = repo_spec ) new <- use_template( if (is_pkg) "package-README" else "project-README", "README.Rmd", data = data, ignore = is_pkg, open = open ) if (!new) { return(invisible(FALSE)) } if (is_pkg && !data$on_github) { ui_bullets(c( "_" = "Update {.path {pth('README.Rmd')}} to include installation instructions." )) } if (uses_git()) { use_git_hook( "pre-commit", render_template("readme-rmd-pre-commit.sh") ) } invisible(TRUE) } #' @export #' @rdname use_readme_rmd use_readme_md <- function(open = rlang::is_interactive()) { check_is_project() is_pkg <- is_package() repo_spec <- tryCatch(target_repo_spec(ask = FALSE), error = function(e) NULL) nm <- if (is_pkg) "Package" else "Project" data <- list2( !!nm := project_name(), Rmd = FALSE, on_github = !is.null(repo_spec), github_spec = repo_spec ) new <- use_template( if (is_pkg) "package-README" else "project-README", "README.md", data = data, open = open ) if (is_pkg && !data$on_github) { ui_bullets(c( "_" = "Update {.path {pth('README.md')}} to include installation instructions." )) } invisible(new) } usethis/R/data.R0000644000176200001440000001100014717524762013202 0ustar liggesusers#' Create package data #' #' `use_data()` makes it easy to save package data in the correct format. I #' recommend you save scripts that generate package data in `data-raw`: use #' `use_data_raw()` to set it up. You also need to document exported datasets. #' #' @param ... Unquoted names of existing objects to save. #' @param internal If `FALSE`, saves each object in its own `.rda` #' file in the `data/` directory. These data files bypass the usual #' export mechanism and are available whenever the package is loaded #' (or via [data()] if `LazyData` is not true). #' #' If `TRUE`, stores all objects in a single `R/sysdata.rda` file. #' Objects in this file follow the usual export rules. Note that this means #' they will be exported if you are using the common `exportPattern()` #' rule which exports all objects except for those that start with `.`. #' @param overwrite By default, `use_data()` will not overwrite existing #' files. If you really want to do so, set this to `TRUE`. #' @param compress Choose the type of compression used by [save()]. #' Should be one of "gzip", "bzip2", or "xz". #' @param version The serialization format version to use. The default, 3, can #' only be read by R versions 3.5.0 and higher. For R 1.4.0 to 3.5.3, use #' version 2. #' @inheritParams base::save #' #' @seealso The [data chapter](https://r-pkgs.org/data.html) of [R #' Packages](https://r-pkgs.org). 
#' @export #' @examples #' \dontrun{ #' x <- 1:10 #' y <- 1:100 #' #' use_data(x, y) # For external use #' use_data(x, y, internal = TRUE) # For internal use #' } use_data <- function(..., internal = FALSE, overwrite = FALSE, compress = "bzip2", version = 3, ascii = FALSE) { check_is_package("use_data()") objs <- get_objs_from_dots(dots(...)) original_minimum_r_version <- pkg_minimum_r_version() serialization_minimum_r_version <- if (version < 3) "2.10" else "3.5" if (is.na(original_minimum_r_version) || original_minimum_r_version < serialization_minimum_r_version) { use_dependency("R", "depends", serialization_minimum_r_version) } if (internal) { use_directory("R") paths <- path("R", "sysdata.rda") objs <- list(objs) } else { use_directory("data") paths <- path("data", objs, ext = "rda") desc <- proj_desc() if (!desc$has_fields("LazyData")) { ui_bullets(c( "v" = "Setting {.field LazyData} to {.val true} in {.path DESCRIPTION}.")) desc$set(LazyData = "true") desc$write() } } check_files_absent(proj_path(paths), overwrite = overwrite) ui_bullets(c( "v" = "Saving {.val {unlist(objs)}} to {.val {paths}}.")) if (!internal) { ui_bullets(c( "_" = "Document your data (see {.url https://r-pkgs.org/data.html})." )) } envir <- parent.frame() mapply( save, list = objs, file = proj_path(paths), MoreArgs = list(envir = envir, compress = compress, version = version, ascii = ascii) ) invisible() } get_objs_from_dots <- function(.dots) { if (length(.dots) == 0L) { ui_abort("Nothing to save.") } is_name <- vapply(.dots, is.symbol, logical(1)) if (!all(is_name)) { ui_abort("Can only save existing named objects.") } objs <- vapply(.dots, as.character, character(1)) duplicated_objs <- which(stats::setNames(duplicated(objs), objs)) if (length(duplicated_objs) > 0L) { objs <- unique(objs) ui_bullets(c( "!" = "Saving duplicates only once: {.val {names(duplicated_objs)}}." )) } objs } check_files_absent <- function(paths, overwrite) { if (overwrite) { return() } ok <- !file_exists(paths) if (all(ok)) { return() } ui_abort(c( "{.path {pth(paths[!ok])}} already exist.", "Use {.code overwrite = TRUE} to overwrite." )) } #' @param name Name of the dataset to be prepared for inclusion in the package. #' @inheritParams use_template #' @rdname use_data #' @export #' @examples #' \dontrun{ #' use_data_raw("daisy") #' } use_data_raw <- function(name = "DATASET", open = rlang::is_interactive()) { check_name(name) r_path <- path("data-raw", asciify(name), ext = "R") use_directory("data-raw", ignore = TRUE) use_template( "packagename-data-prep.R", save_as = r_path, data = list(name = name), ignore = FALSE, open = open ) ui_bullets(c( "_" = "Finish writing the data preparation script in {.path {pth(r_path)}}.", "_" = "Use {.fun usethis::use_data} to add prepared data to package." )) } usethis/R/rmarkdown.R0000644000176200001440000000341414651000165014266 0ustar liggesusers#' Add an RMarkdown Template #' #' Adds files and directories necessary to add a custom rmarkdown template to #' RStudio. It creates: #' * `inst/rmarkdown/templates/{{template_dir}}`. Main directory. #' * `skeleton/skeleton.Rmd`. Your template Rmd file. #' * `template.yml` with basic information filled in. #' #' @param template_name The name as printed in the template menu. #' @param template_dir Name of the directory the template will live in within #' `inst/rmarkdown/templates`. If none is provided by the user, it will be #' created from `template_name`. #' @param template_description Sets the value of `description` in #' `template.yml`. 
#' @param template_create_dir Sets the value of `create_dir` in `template.yml`. #' #' @export #' @examples #' \dontrun{ #' use_rmarkdown_template() #' } use_rmarkdown_template <- function(template_name = "Template Name", template_dir = NULL, template_description = "A description of the template", template_create_dir = FALSE) { # Process some of the inputs template_dir <- template_dir %||% tolower(asciify(template_name)) template_create_dir <- as.character(template_create_dir) template_dir <- path("inst", "rmarkdown", "templates", template_dir) # Scaffold files use_directory(path(template_dir, "skeleton")) use_template( "rmarkdown-template.yml", data = list( template_dir = template_dir, template_name = template_name, template_description = template_description, template_create_dir = template_create_dir ), save_as = path(template_dir, "template.yaml") ) use_template( "rmarkdown-template.Rmd", path(template_dir, "skeleton", "skeleton.Rmd") ) invisible(TRUE) } usethis/R/badge.R0000644000176200001440000001347214717524721013345 0ustar liggesusers#' README badges #' #' These helpers produce the markdown text you need in your README to include #' badges that report information, such as the CRAN version or test coverage, #' and link out to relevant external resources. To add badges automatically #' ensure your badge block starts with a line containing only #' `` and ends with a line containing only #' ``. #' #' @details #' #' * `use_badge()`: a general helper used in all badge functions #' * `use_bioc_badge()`: badge indicates [BioConductor build #' status](https://bioconductor.org/developers/) #' * `use_cran_badge()`: badge indicates what version of your package is #' available on CRAN, powered by #' * `use_lifecycle_badge()`: badge declares the developmental stage of a #' package according to . #' * `use_binder_badge()`: badge indicates that your repository can be launched #' in an executable environment on #' * `use_posit_cloud_badge()`: badge indicates that your repository can be launched #' in a [Posit Cloud](https://posit.cloud) project #' * `use_rscloud_badge()`: `r lifecycle::badge("deprecated")`: Use #' [use_posit_cloud_badge()] instead. #' #' @param badge_name Badge name. Used in error message and alt text #' @param href,src Badge link and image src #' @param stage Stage of the package lifecycle. One of "experimental", #' "stable", "superseded", or "deprecated". #' @seealso Functions that configure continuous integration, such as #' [use_github_actions()], also create badges. #' #' @name badges #' @examples #' \dontrun{ #' use_cran_badge() #' use_lifecycle_badge("stable") #' } NULL #' @rdname badges #' @export use_badge <- function(badge_name, href, src) { path <- find_readme() if (is.null(path)) { ui_bullets(c( "!" = "Can't find a README for the current project.", "i" = "See {.fun usethis::use_readme_rmd} for help creating this file.", "i" = "Badge link will only be printed to screen." )) path <- "README" } changed <- block_append( glue("{badge_name} badge"), glue("[![{badge_name}]({src})]({href})"), path = path, block_start = badge_start, block_end = badge_end ) if (changed && path_ext(path) == "Rmd") { ui_bullets(c( "_" = "Re-knit {.path {pth(path)}} with {.run devtools::build_readme()}." 
)) } invisible(changed) } #' @rdname badges #' @export use_cran_badge <- function() { check_is_package("use_cran_badge()") pkg <- project_name() src <- glue("https://www.r-pkg.org/badges/version/{pkg}") href <- glue("https://CRAN.R-project.org/package={pkg}") use_badge("CRAN status", href, src) invisible(TRUE) } #' @rdname badges #' @export use_bioc_badge <- function() { check_is_package("use_bioc_badge()") pkg <- project_name() src <- glue("http://www.bioconductor.org/shields/build/release/bioc/{pkg}.svg") href <- glue("https://bioconductor.org/checkResults/release/bioc-LATEST/{pkg}") use_badge("BioC status", href, src) invisible(TRUE) } #' @rdname badges #' @export use_lifecycle_badge <- function(stage) { check_is_package("use_lifecycle_badge()") pkg <- project_name() stage <- tolower(stage) stage <- arg_match0(stage, names(stages)) colour <- stages[[stage]] src <- glue("https://img.shields.io/badge/lifecycle-{stage}-{colour}.svg") href <- glue("https://lifecycle.r-lib.org/articles/stages.html#{stage}") use_badge(paste0("Lifecycle: ", stage), href, src) invisible(TRUE) } stages <- c( experimental = "orange", stable = "brightgreen", superseded = "blue", deprecated = "orange" ) #' @rdname badges #' @param ref A Git branch, tag, or SHA #' @param urlpath An optional `urlpath` component to add to the link, e.g. #' `"rstudio"` to open an RStudio IDE instead of a Jupyter notebook. See the #' [binder #' documentation](https://mybinder.readthedocs.io/en/latest/howto/user_interface.html) #' for additional examples. #' @export use_binder_badge <- function(ref = git_default_branch(), urlpath = NULL) { repo_spec <- target_repo_spec() if (is.null(urlpath)) { urlpath <- "" } else { urlpath <- glue("?urlpath={urlpath}") } url <- glue("https://mybinder.org/v2/gh/{repo_spec}/{ref}{urlpath}") img <- "https://mybinder.org/badge_logo.svg" use_badge("Launch binder", url, img) invisible(TRUE) } #' @rdname badges #' @param url A link to an existing [Posit Cloud](https://posit.cloud) #' project. See the [Posit Cloud #' documentation](https://posit.cloud/learn/guide#project-settings-access) #' for details on how to set project access and obtain a project link. 
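#'
#' @examples
#' \dontrun{
#' # The project URL below is a placeholder, for illustration only.
#' use_posit_cloud_badge("https://posit.cloud/content/123456")
#' }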
#' @export use_posit_cloud_badge <- function(url) { check_name(url) project_url <- "posit[.]cloud/content" spaces_url <- "posit[.]cloud/spaces" if (grepl(project_url, url) || grepl(spaces_url, url)) { # TODO: Get posit logo hosted at https://github.com/simple-icons/simple-icons/ # and add to end of img url as `?logo=posit` (or whatever slug we get) img <- "https://img.shields.io/badge/launch-posit%20cloud-447099?style=flat" use_badge("Launch Posit Cloud", url, img) } else { ui_abort(" {.fun usethis::use_posit_cloud_badge} requires a link to an existing Posit Cloud project of the form {.val https://posit.cloud/content/} or {.val https://posit.cloud/spaces//content/}.") } invisible(TRUE) } has_badge <- function(href) { readme_path <- proj_path("README.md") if (!file_exists(readme_path)) { return(FALSE) } readme <- read_utf8(readme_path) any(grepl(href, readme, fixed = TRUE)) } # Badge data structure ---------------------------------------------------- badge_start <- "" badge_end <- "" find_readme <- function() { path_first_existing(proj_path(c("README.Rmd", "README.md"))) } usethis/R/revdep.R0000644000176200001440000000142114651000165013543 0ustar liggesusers#' Reverse dependency checks #' #' Performs set up for checking the reverse dependencies of an R package, as #' implemented by the revdepcheck package: #' * Creates `revdep/` directory and adds it to `.Rbuildignore` #' * Populates `revdep/.gitignore` to prevent tracking of various revdep #' artefacts #' * Prompts user to run the checks with `revdepcheck::revdep_check()` #' #' @export use_revdep <- function() { check_is_package("use_revdep()") use_directory("revdep", ignore = TRUE) use_git_ignore( directory = "revdep", c( "checks", "library", "checks.noindex", "library.noindex", "cloud.noindex", "data.sqlite", "*.html" ) ) ui_bullets(c( "_" = "Run checks with {.run revdepcheck::revdep_check(num_workers = 4)}." )) invisible() } usethis/R/create.R0000644000176200001440000003322414717524721013543 0ustar liggesusers#' Create a package or project #' #' @description #' These functions create an R project: #' * `create_package()` creates an R package #' * `create_project()` creates a non-package project, i.e. a data analysis #' project #' #' Both functions can be called on an existing project; you will be asked before #' any existing files are changed. #' #' @inheritParams use_description #' @param fields A named list of fields to add to `DESCRIPTION`, potentially #' overriding default values. See [use_description()] for how you can set #' personalized defaults using package options. #' @param path A path. If it exists, it is used. If it does not exist, it is #' created, provided that the parent path exists. #' @param roxygen Do you plan to use roxygen2 to document your package? #' @param rstudio If `TRUE`, calls [use_rstudio()] to make the new package or #' project into an [RStudio #' Project](https://r-pkgs.org/workflow101.html#sec-workflow101-rstudio-projects). #' If `FALSE` and a non-package project, a sentinel `.here` file is placed so #' that the directory can be recognized as a project by the #' [here](https://here.r-lib.org) or #' [rprojroot](https://rprojroot.r-lib.org) packages. #' @param open If `TRUE`, [activates][proj_activate()] the new project: #' #' * If using RStudio desktop, the package is opened in a new session. #' * If on RStudio server, the current RStudio project is activated. #' * Otherwise, the working directory and active project is changed. #' #' @return Path to the newly created project or package, invisibly. 
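#'
#' @examples
#' \dontrun{
#' # Illustrative sketch: create throwaway projects under tempdir()
#' create_package(file.path(tempdir(), "mypkg"))
#' create_project(file.path(tempdir(), "myanalysis"))
#' }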
#' @seealso [create_tidy_package()] is a convenience function that extends #' `create_package()` by immediately applying as many of the tidyverse #' development conventions as possible. #' @export create_package <- function(path, fields = list(), rstudio = rstudioapi::isAvailable(), roxygen = TRUE, check_name = TRUE, open = rlang::is_interactive()) { path <- user_path_prep(path) check_path_is_directory(path_dir(path)) name <- path_file(path_abs(path)) if (check_name) { check_package_name(name) } challenge_nested_project(path_dir(path), name) challenge_home_directory(path) create_directory(path) local_project(path, force = TRUE) use_directory("R") proj_desc_create(name, fields, roxygen) use_namespace(roxygen = roxygen) if (rstudio) { use_rstudio() } if (open) { if (proj_activate(proj_get())) { # working directory/active project already set; clear the scheduled # restoration of the original project withr::deferred_clear() } } invisible(proj_get()) } #' @export #' @rdname create_package create_project <- function(path, rstudio = rstudioapi::isAvailable(), open = rlang::is_interactive()) { path <- user_path_prep(path) name <- path_file(path_abs(path)) challenge_nested_project(path_dir(path), name) challenge_home_directory(path) create_directory(path) local_project(path, force = TRUE) use_directory("R") if (rstudio) { use_rstudio() } else { ui_bullets(c( "v" = "Writing a sentinel file {.path {pth('.here')}}.", "_" = "Build robust paths within your project via {.fun here::here}.", "i" = "Learn more at {.url https://here.r-lib.org}." )) file_create(proj_path(".here")) } if (open) { if (proj_activate(proj_get())) { # working directory/active project already set; clear the scheduled # restoration of the original project withr::deferred_clear() } } invisible(proj_get()) } #' Create a project from a GitHub repo #' #' @description #' Creates a new local project and Git repository from a repo on GitHub, by #' either cloning or #' [fork-and-cloning](https://docs.github.com/en/get-started/quickstart/fork-a-repo). #' In the fork-and-clone case, `create_from_github()` also does additional #' remote and branch setup, leaving you in the perfect position to make a pull #' request with [pr_init()], one of several [functions for working with pull #' requests][pull-requests]. #' #' `create_from_github()` works best when your GitHub credentials are #' discoverable. See below for more about authentication. #' #' @template double-auth #' #' @seealso #' * [use_github()] to go the opposite direction, i.e. create a GitHub repo #' from your local repo #' * [git_protocol()] for background on `protocol` (HTTPS vs SSH) #' * [use_course()] to download a snapshot of all files in a GitHub repo, #' without the need for any local or remote Git operations #' #' @inheritParams create_package #' @param repo_spec A string identifying the GitHub repo in one of these forms: #' * Plain `OWNER/REPO` spec #' * Browser URL, such as `"https://github.com/OWNER/REPO"` #' * HTTPS Git URL, such as `"https://github.com/OWNER/REPO.git"` #' * SSH Git URL, such as `"git@github.com:OWNER/REPO.git"` #' @param destdir Destination for the new folder, which will be named according #' to the `REPO` extracted from `repo_spec`. Defaults to the location stored #' in the global option `usethis.destdir`, if defined, or to the user's #' Desktop or similarly conspicuous place otherwise. #' @param fork If `FALSE`, we clone `repo_spec`. 
If `TRUE`, we fork #' `repo_spec`, clone that fork, and do additional setup favorable for #' future pull requests: #' * The source repo, `repo_spec`, is configured as the `upstream` remote, #' using the indicated `protocol`. #' * The local `DEFAULT` branch is set to track `upstream/DEFAULT`, where #' `DEFAULT` is typically `main` or `master`. It is also immediately pulled, #' to cover the case of a pre-existing, out-of-date fork. #' #' If `fork = NA` (the default), we check your permissions on `repo_spec`. If #' you can push, we set `fork = FALSE`, If you cannot, we set `fork = TRUE`. #' @param host GitHub host to target, passed to the `.api_url` argument of #' [gh::gh()]. If `repo_spec` is a URL, `host` is extracted from that. #' #' If unspecified, gh defaults to "https://api.github.com", although gh's #' default can be customised by setting the GITHUB_API_URL environment #' variable. #' #' For a hypothetical GitHub Enterprise instance, either #' "https://github.acme.com/api/v3" or "https://github.acme.com" is #' acceptable. #' @param rstudio Initiate an [RStudio #' Project](https://r-pkgs.org/workflow101.html#sec-workflow101-rstudio-projects)? #' Defaults to `TRUE` if in an RStudio session and project has no #' pre-existing `.Rproj` file. Defaults to `FALSE` otherwise (but note that #' the cloned repo may already be an RStudio Project, i.e. may already have a #' `.Rproj` file). #' @inheritParams use_github #' #' @export #' @examples #' \dontrun{ #' create_from_github("r-lib/usethis") #' #' # repo_spec can be a URL #' create_from_github("https://github.com/r-lib/usethis") #' #' # a URL repo_spec also specifies the host (e.g. GitHub Enterprise instance) #' create_from_github("https://github.acme.com/OWNER/REPO") #' } create_from_github <- function(repo_spec, destdir = NULL, fork = NA, rstudio = NULL, open = rlang::is_interactive(), protocol = git_protocol(), host = NULL) { check_protocol(protocol) parsed_repo_spec <- parse_repo_url(repo_spec) if (!is.null(parsed_repo_spec$host)) { repo_spec <- parsed_repo_spec$repo_spec host <- parsed_repo_spec$host } whoami <- suppressMessages(gh::gh_whoami(.api_url = host)) no_auth <- is.null(whoami) user <- if (no_auth) NULL else whoami$login hint <- code_hint_with_host("gh_token_help", host) if (no_auth && is.na(fork)) { ui_abort(c( "x" = "Unable to discover a GitHub personal access token.", "x" = "Therefore, can't determine your permissions on {.val {repo_spec}}.", "x" = "Therefore, can't decide if {.arg fork} should be {.code TRUE} or {.code FALSE}.", "", "i" = "You have two choices:", "_" = "Make your token available (if in doubt, DO THIS):", " " = "Call {.code {hint}} for instructions that should help.", "_" = "Call {.fun create_from_github} again, but with {.code fork = FALSE}.", " " = "Only do this if you are absolutely sure you don't want to fork.", " " = "Note you will NOT be in a position to make a pull request." )) } if (no_auth && isTRUE(fork)) { ui_abort(c( "x" = "Unable to discover a GitHub personal access token.", "i" = "A token is required in order to fork {.val {repo_spec}}.", "_" = "Call {.code {hint}} for help configuring a token." 
)) } # one of these is true: # - gh is discovering a token for `host` # - gh is NOT discovering a token, but `fork = FALSE`, so that's OK source_owner <- spec_owner(repo_spec) repo_name <- spec_repo(repo_spec) gh <- gh_tr(list(repo_owner = source_owner, repo_name = repo_name, api_url = host)) repo_info <- gh("GET /repos/{owner}/{repo}") # 2023-01-28 We're seeing the GitHub bug again around default branch in a # fresh fork. If I create a fork, the POST payload *sometimes* mis-reports the # default branch. I.e. it reports `main`, even though the actual default # branch is `master`. Therefore we're reverting to consulting the source repo # for this info default_branch <- repo_info$default_branch if (is.na(fork)) { fork <- !isTRUE(repo_info$permissions$push) fork_status <- glue("fork = {fork}") ui_bullets(c("v" = "Setting {.code {fork_status}}.")) } # fork is either TRUE or FALSE if (fork && identical(user, repo_info$owner$login)) { ui_abort(" Can't fork, because the authenticated user {.val {user}} already owns the source repo {.val {repo_info$full_name}}.") } destdir <- user_path_prep(destdir %||% conspicuous_place()) check_path_is_directory(destdir) challenge_nested_project(destdir, repo_name) repo_path <- path(destdir, repo_name) create_directory(repo_path) check_directory_is_empty(repo_path) if (fork) { ## https://developer.github.com/v3/repos/forks/#create-a-fork ui_bullets(c("v" = "Forking {.val {repo_info$full_name}}.")) upstream_url <- switch( protocol, https = repo_info$clone_url, ssh = repo_info$ssh_url ) repo_info <- gh("POST /repos/{owner}/{repo}/forks") ui_bullets(c("i" = "Waiting for the fork to finalize before cloning...")) Sys.sleep(3) } origin_url <- switch( protocol, https = repo_info$clone_url, ssh = repo_info$ssh_url ) ui_bullets(c( "v" = "Cloning repo from {.val {origin_url}} into {.path {repo_path}}." )) gert::git_clone(origin_url, repo_path, verbose = FALSE) proj_path <- find_rstudio_root(repo_path) local_project(proj_path, force = TRUE) # schedule restoration of project # 2023-01-28 again, it would be more natural to trust the default branch of # the fork, but that cannot always be trusted. For now, we're still using # the default branch learned from the source repo. ui_bullets(c("i" = "Default branch is {.val {default_branch}}.")) if (fork) { ui_bullets(c( "v" = "Adding {.val upstream} remote: {.val {upstream_url}}" )) use_git_remote("upstream", upstream_url) pr_merge_main() upstream_remref <- glue("upstream/{default_branch}") ui_bullets(c( "v" = "Setting remote tracking branch for local {.val {default_branch}} branch to {.val {upstream_remref}}." )) gert::git_branch_set_upstream(upstream_remref, repo = git_repo()) config_key <- glue("remote.upstream.created-by") gert::git_config_set(config_key, "usethis::create_from_github", repo = git_repo()) } rstudio <- rstudio %||% rstudio_available() rstudio <- rstudio && !is_rstudio_project() if (rstudio) { use_rstudio(reformat = FALSE) } if (open) { if (proj_activate(proj_get())) { # Working directory/active project changed; so don't undo on exit withr::deferred_clear() } } invisible(proj_get()) } # If there's a single directory containing an .Rproj file, use it. 
# Otherwise work in the repo root find_rstudio_root <- function(path) { rproj <- rproj_paths(path, recurse = TRUE) if (length(rproj) == 1) { path_dir(rproj) } else { path } } challenge_nested_project <- function(path, name) { if (!possibly_in_proj(path)) { return(invisible()) } # creates an undocumented backdoor we can exploit when the interactive # approval is impractical, e.g. in tests if (isTRUE(getOption("usethis.allow_nested_project", FALSE))) { return(invisible()) } ui_bullets(c( "!" = "New project {.val {name}} is nested inside an existing project {.path {pth(path)}}, which is rarely a good idea.", "i" = "If this is unexpected, the {.pkg here} package has a function, {.fun here::dr_here} that reveals why {.path {pth(path)}} is regarded as a project." )) if (ui_nah("Do you want to create anyway?")) { ui_abort("Cancelling project creation.") } invisible() } challenge_home_directory <- function(path) { homes <- unique(c(path_home(), path_home_r())) if (!path %in% homes) { return(invisible()) } qualification <- if (is_windows()) { glue("a special directory, i.e. some applications regard it as ") } else { "" } ui_bullets(c( "!" = "{.path {pth(path)}} is {qualification}your home directory.", "i" = "It is generally a bad idea to create a new project here.", "i" = "You should probably create your new project in a subdirectory." )) if (ui_nah("Do you want to create anyway?")) { ui_abort("Good move! Cancelling project creation.") } invisible() } usethis/R/edit.R0000644000176200001440000001665314717524721013234 0ustar liggesusers#' Open file for editing #' #' Opens a file for editing in RStudio, if that is the active environment, or #' via [utils::file.edit()] otherwise. If the file does not exist, it is #' created. If the parent directory does not exist, it is also created. #' `edit_template()` specifically opens templates in `inst/templates` for use #' with [use_template()]. #' #' @param path Path to target file. #' @param open Whether to open the file for interactive editing. #' @return Target path, invisibly. #' @export #' @keywords internal #' #' @examples #' \dontrun{ #' edit_file("DESCRIPTION") #' edit_file("~/.gitconfig") #' } edit_file <- function(path, open = rlang::is_interactive()) { open <- open && is_interactive() path <- user_path_prep(path) create_directory(path_dir(path)) file_create(path) if (!open) { ui_bullets(c("_" = "Edit {.path {pth(path)}}.")) return(invisible(path)) } ui_bullets(c("_" = "Modify {.path {pth(path)}}.")) if (rstudio_available() && rstudioapi::hasFun("navigateToFile")) { rstudioapi::navigateToFile(path) } else { utils::file.edit(path) } invisible(path) } #' @param template The target template file. If not specified, existing template #' files are offered for interactive selection. #' @export #' @rdname edit_file edit_template <- function(template = NULL, open = rlang::is_interactive()) { check_is_package("edit_template()") if (is.null(template)) { ui_bullets(c( "!" = "No template specified ... checking {.path {pth('inst/templates')}}." )) template <- choose_template() } if (is_empty(template)) { return(invisible()) } path <- proj_path("inst", "templates", template) edit_file(path, open) } choose_template <- function() { if (!is_interactive()) { return(character()) } templates <- path_file(dir_ls(proj_path("inst", "templates"), type = "file")) if (is_empty(templates)) { return(character()) } choice <- utils::menu( choices = templates, title = "Which template do you want to edit? 
(0 to exit)" ) templates[choice] } #' Open configuration files #' #' * `edit_r_profile()` opens `.Rprofile` #' * `edit_r_environ()` opens `.Renviron` #' * `edit_r_makevars()` opens `.R/Makevars` #' * `edit_git_config()` opens `.gitconfig` or `.git/config` #' * `edit_git_ignore()` opens global (user-level) gitignore file and ensures #' its path is declared in your global Git config. #' * `edit_pkgdown_config` opens the pkgdown YAML configuration file for the #' current Project. #' * `edit_rstudio_snippets()` opens RStudio's snippet config for the given type. #' * `edit_rstudio_prefs()` opens [RStudio's preference file][use_rstudio_preferences()]. #' #' The `edit_r_*()` functions consult R's notion of user's home directory. #' The `edit_git_*()` functions (and \pkg{usethis} in general) inherit home #' directory behaviour from the \pkg{fs} package, which differs from R itself #' on Windows. The \pkg{fs} default is more conventional in terms of the #' location of user-level Git config files. See [fs::path_home()] for more #' details. #' #' Files created by `edit_rstudio_snippets()` will *mask*, not supplement, #' the built-in default snippets. If you like the built-in snippets, copy them #' and include with your custom snippets. #' #' @return Path to the file, invisibly. #' #' @param scope Edit globally for the current __user__, or locally for the #' current __project__ #' @name edit NULL #' @export #' @rdname edit edit_r_profile <- function(scope = c("user", "project")) { path <- scoped_path_r(scope, ".Rprofile", envvar = "R_PROFILE_USER") edit_file(path) ui_bullets(c("_" = "Restart R for changes to take effect.")) invisible(path) } #' @export #' @rdname edit edit_r_environ <- function(scope = c("user", "project")) { path <- scoped_path_r(scope, ".Renviron", envvar = "R_ENVIRON_USER") edit_file(path) ui_bullets(c("_" = "Restart R for changes to take effect.")) invisible(path) } #' @export #' @rdname edit edit_r_buildignore <- function() { check_is_package("edit_r_buildignore()") edit_file(proj_path(".Rbuildignore")) } #' @export #' @rdname edit edit_r_makevars <- function(scope = c("user", "project")) { path <- scoped_path_r(scope, ".R", "Makevars") edit_file(path) } #' @export #' @rdname edit #' @param type Snippet type (case insensitive text). edit_rstudio_snippets <- function(type = c( "r", "markdown", "c_cpp", "css", "html", "java", "javascript", "python", "sql", "stan", "tex", "yaml" )) { type <- tolower(type) type <- match.arg(type) file <- path_ext_set(type, "snippets") # Snippet location changed in 1.3: # https://blog.rstudio.com/2020/02/18/rstudio-1-3-preview-configuration/ new_rstudio <- !rstudioapi::isAvailable() || rstudioapi::getVersion() >= "1.3.0" old_path <- path_home_r(".R", "snippets", file) new_path <- rstudio_config_path("snippets", file) # Mimic RStudio behaviour: copy to new location if you edit if (new_rstudio && file_exists(old_path) && !file_exists(new_path)) { create_directory(path_dir(new_path)) file_copy(old_path, new_path) ui_bullets(c( "v" = "Copying snippets file to {.path {pth(new_path)}}." )) } path <- if (new_rstudio) new_path else old_path if (!file_exists(path)) { ui_bullets(c( "v" = "New snippet file at {.path {pth(path)}}.", "i" = "This masks the default snippets for {.field {type}}.", "i" = "Delete this file and restart RStudio to restore the default snippets." 
)) } edit_file(path) } #' @export #' @rdname edit edit_rstudio_prefs <- function() { path <- rstudio_config_path("rstudio-prefs.json") edit_file(path) ui_bullets(c("_" = "Restart RStudio for changes to take effect.")) invisible(path) } scoped_path_r <- function(scope = c("user", "project"), ..., envvar = NULL) { scope <- match.arg(scope) # Try environment variable in user scopes if (scope == "user" && !is.null(envvar)) { env <- Sys.getenv(envvar, unset = "") if (!identical(env, "")) { return(user_path_prep(env)) } } root <- switch(scope, user = path_home_r(), project = proj_get() ) path(root, ...) } # git paths --------------------------------------------------------------- # Note that on windows R's definition of ~ is in a nonstandard place, # so it is important to use path_home(), not path_home_r() #' @export #' @rdname edit edit_git_config <- function(scope = c("user", "project")) { scope <- match.arg(scope) path <- switch( scope, user = path_home(".gitconfig"), project = proj_path(".git", "config") ) invisible(edit_file(path)) } #' @export #' @rdname edit edit_git_ignore <- function(scope = c("user", "project")) { scope <- match.arg(scope) if (scope == "user") { ensure_core_excludesFile() } file <- git_ignore_path(scope) if (scope == "user" && !file_exists(file)) { git_vaccinate() } invisible(edit_file(file)) } git_ignore_path <- function(scope = c("user", "project")) { scope <- match.arg(scope) switch( scope, user = git_cfg_get("core.excludesFile", where = "global"), project = proj_path(".gitignore") ) } # pkgdown --------------------------------------------------------------- #' @export #' @rdname edit edit_pkgdown_config <- function() { path <- pkgdown_config_path() if (is.null(path)) { ui_bullets(c("x" = "No pkgdown config file found in current Project.")) } else { invisible(edit_file(path)) } } usethis/R/template.R0000644000176200001440000000561414651000165014101 0ustar liggesusers#' Use a usethis-style template #' #' Creates a file from data and a template found in a package. Provides control #' over file name, the addition to `.Rbuildignore`, and opening the file for #' inspection. #' #' This function can be used as the engine for a templating function in other #' packages. The `template` argument is used along with the `package` argument #' to derive the path to your template file; it will be expected at #' `fs::path_package(package = package, "templates", template)`. We use #' `fs::path_package()` instead of `base::system.file()` so that path #' construction works even in a development workflow, e.g., works with #' `devtools::load_all()` or `pkgload::load_all()`. *Note this describes the #' behaviour of `fs::path_package()` in fs v1.2.7.9001 and higher.* #' #' To interpolate your data into the template, supply a list using #' the `data` argument. Internally, this function uses #' [whisker::whisker.render()] to combine your template file with your data. #' #' @param template Path to template file relative to `templates/` directory #' within `package`; see details. #' @param save_as Path of file to create, relative to root of active project. #' Defaults to `template` #' @param data A list of data passed to the template. #' @param ignore Should the newly created file be added to `.Rbuildignore`? #' @param open Open the newly created file for editing? Happens in RStudio, if #' applicable, or via [utils::file.edit()] otherwise. #' @param package Name of the package where the template is found. #' @return A logical vector indicating if file was modified. 
#' @export #' @examples #' \dontrun{ #' # Note: running this will write `NEWS.md` to your working directory #' use_template( #' template = "NEWS.md", #' data = list(Package = "acme", Version = "1.2.3"), #' package = "usethis" #' ) #' } use_template <- function(template, save_as = template, data = list(), ignore = FALSE, open = FALSE, package = "usethis") { template_contents <- render_template(template, data, package = package) new <- write_over(proj_path(save_as), template_contents) if (ignore) { use_build_ignore(save_as) } if (open && new) { edit_file(proj_path(save_as)) } invisible(new) } render_template <- function(template, data = list(), package = "usethis") { template_path <- find_template(template, package = package) strsplit(whisker::whisker.render(read_utf8(template_path), data), "\n")[[1]] } find_template <- function(template_name, package = "usethis") { check_installed(package) path <- tryCatch( path_package(package = package, "templates", template_name), error = function(e) "" ) if (identical(path, "")) { ui_abort(" Could not find template {.val {template_name}} in package {.pkg package} package.") } path } usethis/R/documentation.R0000644000176200001440000000241114651000165015127 0ustar liggesusers#' Package-level documentation #' #' Adds a dummy `.R` file that will cause roxygen2 to generate basic #' package-level documentation. If your package is named "foo", this will make #' help available to the user via `?foo` or `package?foo`. Once you call #' `devtools::document()`, roxygen2 will flesh out the `.Rd` file using data #' from the `DESCRIPTION`. That ensures you don't need to repeat (and remember #' to update!) the same information in multiple places. This `.R` file is also a #' good place for roxygen directives that apply to the whole package (vs. a #' specific function), such as global namespace tags like `@importFrom`. #' #' @seealso The [documentation chapter](https://r-pkgs.org/man.html) of [R #' Packages](https://r-pkgs.org) #' @inheritParams use_template #' @export use_package_doc <- function(open = rlang::is_interactive()) { check_is_package("use_package_doc()") use_directory("R") use_template( "packagename-package.R", package_doc_path(), open = open ) ui_bullets(c( "_" = "Run {.run devtools::document()} to update package-level documentation." )) } package_doc_path <- function() { path("R", paste0(project_name(), "-package"), ext = "R") } has_package_doc <- function() { file_exists(proj_path(package_doc_path())) } usethis/R/spelling.R0000644000176200001440000000232614651000165014100 0ustar liggesusers#' Use spell check #' #' Adds a unit test to automatically run a spell check on documentation and, #' optionally, vignettes during `R CMD check`, using the #' [spelling][spelling::spell_check_package] package. Also adds a `WORDLIST` #' file to the package, which is a dictionary of whitelisted words. See #' [spelling::wordlist] for details. #' #' @param vignettes Logical, `TRUE` to spell check all `rmd` and `rnw` files in #' the `vignettes/` folder. #' @param lang Preferred spelling language. Usually either `"en-US"` or #' `"en-GB"`. #' @param error Logical, indicating whether the unit test should fail if #' spelling errors are found. 
Defaults to `FALSE`, which does not error, but #' prints potential spelling errors #' @export use_spell_check <- function(vignettes = TRUE, lang = "en-US", error = FALSE) { check_is_package("use_spell_check()") check_installed("spelling") use_dependency("spelling", "Suggests") proj_desc_field_update("Language", lang, overwrite = TRUE) spelling::spell_check_setup( pkg = proj_get(), vignettes = vignettes, lang = lang, error = error ) ui_bullets(c("_" = "Run {.run devtools::check()} to trigger spell check.")) } usethis/R/block.R0000644000176200001440000000552114651000165013355 0ustar liggesusersblock_append <- function(desc, value, path, block_start = "# <<<", block_end = "# >>>", block_prefix = NULL, block_suffix = NULL, sort = FALSE) { if (!is.null(path) && file_exists(path)) { lines <- read_utf8(path) if (all(value %in% lines)) { return(FALSE) } block_lines <- block_find(lines, block_start, block_end) } else { block_lines <- NULL } if (is.null(block_lines)) { ui_bullets(c( "_" = "Copy and paste the following lines into {.path {pth(path)}}:" )) ui_code_snippet(c(block_prefix, block_start, value, block_end, block_suffix)) return(FALSE) } ui_bullets(c("v" = "Adding {.val {desc}} to {.path {pth(path)}}.")) start <- block_lines[[1]] end <- block_lines[[2]] block <- lines[seq2(start, end)] new_lines <- union(block, value) if (sort) { new_lines <- sort(new_lines) } lines <- c( lines[seq2(1, start - 1L)], new_lines, lines[seq2(end + 1L, length(lines))] ) write_utf8(path, lines) TRUE } block_replace <- function(desc, value, path, block_start = "# <<<", block_end = "# >>>") { if (!is.null(path) && file_exists(path)) { lines <- read_utf8(path) block_lines <- block_find(lines, block_start, block_end) } else { block_lines <- NULL } if (is.null(block_lines)) { ui_bullets(c( "_" = "Copy and paste the following lines into {.path {pth(path)}}:" )) ui_code_snippet(c(block_start, value, block_end)) return(invisible(FALSE)) } start <- block_lines[[1]] end <- block_lines[[2]] block <- lines[seq2(start, end)] if (identical(value, block)) { return(invisible(FALSE)) } ui_bullets(c("v" = "Replacing {desc} in {.path {pth(path)}}.")) lines <- c( lines[seq2(1, start - 1L)], value, lines[seq2(end + 1L, length(lines))] ) write_utf8(path, lines) } block_show <- function(path, block_start = "# <<<", block_end = "# >>>") { lines <- read_utf8(path) block <- block_find(lines, block_start, block_end) lines[seq2(block[[1]], block[[2]])] } block_find <- function(lines, block_start = "# <<<", block_end = "# >>>") { # No file if (is.null(lines)) { return(NULL) } start <- which(lines == block_start) end <- which(lines == block_end) # No block if (length(start) == 0 && length(end) == 0) { return(NULL) } if (!(length(start) == 1 && length(end) == 1 && start < end)) { ui_abort(c( "Invalid block specification.", "Must start with {.code {block_start}} and end with {.code {block_end}}." 
)) } c(start + 1L, end - 1L) } block_create <- function(lines = character(), block_start = "# <<<", block_end = "# >>>") { c(block_start, unique(lines), block_end) } usethis/R/usethis-package.R0000644000176200001440000000614114651000165015337 0ustar liggesusers#' @keywords internal "_PACKAGE" ## usethis namespace: start #' @import fs #' @import rlang #' @importFrom glue glue glue_collapse glue_data #' @importFrom lifecycle deprecated #' @importFrom purrr map map_chr map_lgl map_int #' @importFrom utils available.packages ## usethis namespace: end NULL #' Options consulted by usethis #' #' @description #' User-configurable options consulted by usethis, which provide a mechanism #' for setting default behaviors for various functions. #' #' If the built-in defaults don't suit you, set one or more of these options. #' Typically, this is done in the `.Rprofile` startup file, which you can open #' for editing with [edit_r_profile()] - this will set the specified options for #' all future R sessions. Your code will look something like: #' #' ``` #' options( #' usethis.description = list( #' "Authors@R" = utils::person( #' "Jane", "Doe", #' email = "jane@example.com", #' role = c("aut", "cre"), #' comment = c(ORCID = "YOUR-ORCID-ID") #' ), #' License = "MIT + file LICENSE" #' ), #' usethis.destdir = "/path/to/folder/", # for use_course(), create_from_github() #' usethis.protocol = "ssh", # Use ssh git protocol #' usethis.overwrite = TRUE # overwrite files in Git repos without confirmation #' ) #' ``` #' #' @section Options for the usethis package: #' #' - `usethis.description`: customize the default content of new `DESCRIPTION` #' files by setting this option to a named list. #' If you are a frequent package developer, it is worthwhile to pre-configure #' your preferred name, email, license, etc. See the example above and the #' [article on usethis setup](https://usethis.r-lib.org/articles/articles/usethis-setup.html) #' for more details. #' #' - `usethis.destdir`: Default directory in which to place new projects #' downloaded by [use_course()] and [create_from_github()]. #' If this option is unset, the user's Desktop or similarly conspicuous place #' will be used. #' #' - `usethis.protocol`: specifies your preferred transport protocol for Git. #' Either "https" (default) or "ssh": #' * `usethis.protocol = "https"` implies `https://github.com//.git` #' * `usethis.protocol = "ssh"` implies `git@@github.com:/.git` #' #' You can also change this for the duration of your R session with #' [use_git_protocol()]. #' #' - `usethis.overwrite`: If `TRUE`, usethis overwrites an existing file without #' asking for user confirmation if the file is inside a Git repo. The #' rationale is that the normal Git workflow makes it easy to see and #' selectively accept/discard any proposed changes. #' #' - `usethis.quiet`: Set to `TRUE` to suppress user-facing messages. Default #' `FALSE`. #' #' - `usethis.allow_nested_project`: Whether or not to allow #' you to create a project inside another project. This is rarely a good idea, #' so this option defaults to `FALSE`. #' #' @name usethis_options NULL release_bullets <- function() { c( "Check that `use_code_of_conduct()` is shipping the latest version of the Contributor Covenant ()." ) } usethis/R/make.R0000644000176200001440000000052614651000165013200 0ustar liggesusers#' Create Makefile #' #' `use_make()` adds a basic Makefile to the project root directory. #' #' @seealso The [documentation for GNU #' Make](https://www.gnu.org/software/make/manual/html_node/). 
#' @export use_make <- function() { use_template( "Makefile", data = list(name = project_name()) ) use_build_ignore("Makefile") } usethis/R/issue.R0000644000176200001440000001357614717524721013440 0ustar liggesusers#' Helpers for GitHub issues #' #' @description #' The `issue_*` family of functions allows you to perform common operations on #' GitHub issues from within R. They're designed to help you efficiently deal #' with large numbers of issues, particularly motivated by the challenges faced #' by the tidyverse team. #' #' * `issue_close_community()` closes an issue, because it's not a bug report or #' feature request, and points the author towards Posit Community as a #' better place to discuss usage (). #' #' * `issue_reprex_needed()` labels the issue with the "reprex" label and #' gives the author some advice about what is needed. #' #' @section Saved replies: #' #' Unlike GitHub's "saved replies", these functions can: #' * Be shared between people #' * Perform other actions, like labelling, or closing #' * Have additional arguments #' * Include randomness (like friendly gifs) #' #' @param number Issue number #' @param reprex Does the issue also need a reprex? #' #' @examples #' \dontrun{ #' issue_close_community(12, reprex = TRUE) #' #' issue_reprex_needed(241) #' } #' @name issue-this NULL #' @export #' @rdname issue-this issue_close_community <- function(number, reprex = FALSE) { tr <- target_repo(github_get = TRUE) if (!tr$can_push) { # https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#repository-access-for-each-permission-level # I have not found a way to detect triage permission via API. # It seems you just have to try? ui_bullets(c( "!" = "You don't seem to have push access for {.val {tr$repo_spec}}.", "i" = "Unless you have triage permissions, you won't be allowed to close an issue." )) if (ui_nah("Do you want to try anyway?")) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } info <- issue_info(number, tr) issue <- issue_details(info) ui_bullets(c( "v" = "Closing issue {.val {issue$shorthand}} ({.field {issue$author}}): {.val {issue$title}}." )) if (info$state == "closed") { ui_abort("Issue {.val {number}} is already closed.") } reprex_insert <- glue(" But before you ask there, I'd suggest that you create a \\ [reprex](https://reprex.tidyverse.org/articles/reprex-dos-and-donts.htm), \\ because that greatly increases your chances getting help.") message <- glue( "Hi {issue$author},\n", "\n", "This issue doesn't appear to be a bug report or a specific feature ", "request, so it's more suitable for ", "[RStudio Community](https://community.rstudio.com). ", if (reprex) reprex_insert else "", "\n\n", "Thanks!" ) issue_comment_add(number, message = message, tr = tr) issue_edit(number, state = "closed", tr = tr) } #' @export #' @rdname issue-this issue_reprex_needed <- function(number) { tr <- target_repo(github_get = TRUE) if (!tr$can_push) { # https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#repository-access-for-each-permission-level # I can't find anyway to detect triage permission via API. # It seems you just have to try? ui_bullets(c( "!" = "You don't seem to have push access for {.val {tr$repo_spec}}.", "i" = "Unless you have triage permissions, you won't be allowed to label an issue." 
)) if (ui_nah("Do you want to try anyway?")) { ui_bullets(c("x" = "Cancelling.")) return(invisible()) } } info <- issue_info(number, tr) labels <- map_chr(info$labels, "name") issue <- issue_details(info) if ("reprex" %in% labels) { ui_abort("Issue {.val {number}} already has {.val reprex} label.") } ui_bullets(c( "v" = "Labelling and commenting on issue {.val {issue$shorthand}} ({.field {issue$author}}): {.val {issue$title}}." )) message <- glue(" Can you please provide a minimal reproducible example using the \\ [reprex](http://reprex.tidyverse.org) package? The goal of a reprex is to make it as easy as possible for me to \\ recreate your problem so that I can fix it. If you've never made a minimal reprex before, there is lots of good advice \\ [here](https://reprex.tidyverse.org/articles/reprex-dos-and-donts.html).") issue_comment_add(number, message = message, tr = tr) issue_edit(number, labels = as.list(union(labels, "reprex")), tr = tr) } # low-level operations ---------------------------------------------------- issue_comment_add <- function(number, message, tr = NULL) { issue_gh( "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", number = number, body = message, tr = tr ) } issue_edit <- function(number, ..., tr = NULL) { issue_gh( "PATCH /repos/{owner}/{repo}/issues/{issue_number}", ..., number = number, tr = tr ) } issue_info <- function(number, tr = NULL) { issue_gh( "GET /repos/{owner}/{repo}/issues/{issue_number}", number = number, tr = tr ) } # Helpers ----------------------------------------------------------------- # Assumptions: # * Issue number is called `issue_number`; make sure to tweak `endpoint` if # necessary. # * The user-facing caller should pass information about the target repo, # because that is required to vet the GitHub remote config anyway. # The fallback to target_repo() is purely for development convenience. issue_gh <- function(endpoint, ..., number, tr = NULL) { tr <- tr %||% target_repo(github_get = NA) gh <- gh_tr(tr) out <- gh(endpoint, ..., issue_number = number) if (substr(endpoint, 1, 4) == "GET ") { out } else { invisible(out) } } issue_details <- function(info) { repo_dat <- parse_github_remotes(info$html_url) list( shorthand = glue( "{repo_dat$repo_owner}/{repo_dat$repo_name}/#{info$number}" ), author = glue("@{info$user$login}"), title = info$title ) } usethis/R/proj.R0000644000176200001440000002234014651514262013243 0ustar liggesusersproj <- new.env(parent = emptyenv()) proj_get_ <- function() proj$cur proj_set_ <- function(path) { old <- proj$cur proj$cur <- path invisible(old) } #' Utility functions for the active project #' #' @description #' Most `use_*()` functions act on the **active project**. If it is #' unset, usethis uses [rprojroot](https://rprojroot.r-lib.org) to #' find the project root of the current working directory. It establishes the #' project root by looking for a `.here` file, an RStudio Project, a package #' `DESCRIPTION`, Git infrastructure, a `remake.yml` file, or a `.projectile` #' file. It then stores the active project for use for the remainder of the #' session. #' #' In general, end user scripts should not contain direct calls to #' `usethis::proj_*()` utility functions. They are internal functions that are #' exported for occasional interactive use or use in packages that extend #' usethis. End user code should call functions in #' [rprojroot](https://rprojroot.r-lib.org) or its simpler companion, #' [here](https://here.r-lib.org), to programmatically detect a project and #' build paths within it. 
#' #' If you are puzzled why a path (usually the current working directory) does #' *not* appear to be inside project, it can be helpful to call #' `here::dr_here()` to get much more verbose feedback. #' #' @name proj_utils #' @family project functions #' @examples #' \dontrun{ #' ## see the active project #' proj_get() #' #' ## manually set the active project #' proj_set("path/to/target/project") #' #' ## build a path within the active project (both produce same result) #' proj_path("R/foo.R") #' proj_path("R", "foo", ext = "R") #' #' ## build a path within SOME OTHER project #' with_project("path/to/some/other/project", proj_path("blah.R")) #' #' ## convince yourself that with_project() temporarily changes the project #' with_project("path/to/some/other/project", print(proj_sitrep())) #' } NULL #' @describeIn proj_utils Retrieves the active project and, if necessary, #' attempts to set it in the first place. #' @export proj_get <- function() { # Called for first time so try working directory if (!proj_active()) { proj_set(".") } proj_get_() } #' @describeIn proj_utils Sets the active project. #' @param path Path to set. This `path` should exist or be `NULL`. #' @param force If `TRUE`, use this path without checking the usual criteria for #' a project. Use sparingly! The main application is to solve a temporary #' chicken-egg problem: you need to set the active project in order to add #' project-signalling infrastructure, such as initialising a Git repo or #' adding a `DESCRIPTION` file. #' @export proj_set <- function(path = ".", force = FALSE) { if (!force && dir_exists(path %||% "") && is_in_proj(path)) { return(invisible(proj_get_())) } path <- proj_path_prep(path) if (is.null(path) || force) { proj_string <- if (is.null(path)) "" else path ui_bullets(c("v" = "Setting active project to {.val {proj_string}}.")) return(proj_set_(path)) } check_path_is_directory(path) new_project <- proj_find(path) if (is.null(new_project)) { ui_abort(c( "Path {.path {pth(path)}} does not appear to be inside a project or package.", "Read more in the help for {.fun usethis::proj_get}." )) } proj_set(path = new_project, force = TRUE) } #' @describeIn proj_utils Builds paths within the active project returned by #' `proj_get()`. Thin wrapper around [fs::path()]. #' @inheritParams fs::path #' @export proj_path <- function(..., ext = "") { has_absolute_path <- function(x) any(is_absolute_path(x)) dots <- list(...) if (any(map_lgl(dots, has_absolute_path))) { ui_abort("Paths must be relative to the active project, not absolute.") } path_norm(path(proj_get(), ..., ext = ext)) } #' @describeIn proj_utils Runs code with a temporary active project and, #' optionally, working directory. It is an example of the `with_*()` functions #' in [withr](https://withr.r-lib.org). #' @param code Code to run with temporary active project #' @param setwd Whether to also temporarily set the working directory to the #' active project, if it is not `NULL` #' @param quiet Whether to suppress user-facing messages, while operating in the #' temporary active project #' @export with_project <- function(path = ".", code, force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE)) { local_project(path = path, force = force, setwd = setwd, quiet = quiet) force(code) } #' @describeIn proj_utils Sets an active project and, optionally, working #' directory until the current execution environment goes out of scope, e.g. #' the end of the current function or test. 
It is an example of the #' `local_*()` functions in [withr](https://withr.r-lib.org). #' @param .local_envir The environment to use for scoping. Defaults to current #' execution environment. #' @export local_project <- function(path = ".", force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE), .local_envir = parent.frame()) { withr::local_options(usethis.quiet = quiet, .local_envir = .local_envir) old_project <- proj_get_() # this could be `NULL`, i.e. no active project withr::defer(proj_set(path = old_project, force = TRUE), envir = .local_envir) proj_set(path = path, force = force) temp_proj <- proj_get_() # this could be `NULL` if (isTRUE(setwd) && !is.null(temp_proj)) { withr::local_dir(temp_proj, .local_envir = .local_envir) } } ## usethis policy re: preparation of the path to active project proj_path_prep <- function(path) { if (is.null(path)) { return(path) } path <- path_abs(path) if (file_exists(path)) { path_real(path) } else { path } } ## usethis policy re: preparation of user-provided path to a resource on user's ## file system user_path_prep <- function(path) { ## usethis uses fs's notion of home directory ## this ensures we are consistent about that path_expand(path) } proj_rel_path <- function(path) { if (is_in_proj(path)) { as.character(path_rel(path, start = proj_get())) } else { path } } proj_crit <- function() { rprojroot::has_file(".here") | rprojroot::is_rstudio_project | rprojroot::is_r_package | rprojroot::is_git_root | rprojroot::is_remake_project | rprojroot::is_projectile_project } proj_find <- function(path = ".") { tryCatch( rprojroot::find_root(proj_crit(), path = path), error = function(e) NULL ) } possibly_in_proj <- function(path = ".") !is.null(proj_find(path)) is_package <- function(base_path = proj_get()) { res <- tryCatch( rprojroot::find_package_root_file(path = base_path), error = function(e) NULL ) !is.null(res) } check_is_package <- function(whos_asking = NULL) { if (is_package()) { return(invisible()) } message <- "Project {.val {project_name()}} is not an R package." if (!is.null(whos_asking)) { whos_asking_fn <- sub("()", "", whos_asking, fixed = TRUE) message <- c( "i" = "{.topic [{whos_asking}](usethis::{whos_asking_fn})} is designed to work with packages.", "x" = message ) } ui_abort(message) } check_is_project <- function() { if (!possibly_in_proj()) { ui_abort(c( "We do not appear to be inside a valid project or package.", "Read more in the help for {.fun usethis::proj_get}." )) } } proj_active <- function() !is.null(proj_get_()) is_in_proj <- function(path) { if (!proj_active()) { return(FALSE) } identical( proj_get(), ## use path_abs() in case path does not exist yet path_common(c(proj_get(), path_expand(path_abs(path)))) ) } project_name <- function(base_path = proj_get()) { ## escape hatch necessary to solve this chicken-egg problem: ## create_package() calls use_description(), which calls project_name() ## to learn package name from the path, in order to make DESCRIPTION ## and DESCRIPTION is how we recognize a package as a usethis project if (!possibly_in_proj(base_path)) { return(path_file(base_path)) } if (is_package(base_path)) { proj_desc(base_path)$get_field("Package") } else { path_file(base_path) } } #' Activate a project #' #' Activates a project in usethis, R session, and (if relevant) RStudio senses. #' If you are in RStudio, this will open a new RStudio session. If not, it will #' change the working directory and [active project][proj_set()]. 
#' #' @param path Project directory #' @return Single logical value indicating if current session is modified. #' @export proj_activate <- function(path) { check_path_is_directory(path) path <- user_path_prep(path) if (rstudio_available() && rstudioapi::hasFun("openProject")) { ui_bullets(c( "v" = "Opening {.path {pth(path, base = NA)}} in new RStudio session." )) rstudioapi::openProject(path, newSession = TRUE) invisible(FALSE) } else { proj_set(path) rel_path <- path_rel(proj_get(), path_wd()) if (rel_path != ".") { ui_bullets(c( "v" = "Changing working directory to {.path {pth(path, base = NA)}}" )) setwd(proj_get()) } invisible(TRUE) } } usethis/R/utils-ui.R0000644000176200001440000001722114651000165014036 0ustar liggesusers# usethis theme ---------------------------------------------------------------- usethis_theme <- function() { list( # add a "todo" bullet, which is intended to be seen as an unchecked checkbox ".bullets .bullet-_" = list( "text-exdent" = 2, before = function(x) paste0(cli::col_red(cli::symbol$checkbox_off), " ") ), # historically, usethis has used yellow for this ".bullets .bullet-i" = list( "text-exdent" = 2, before = function(x) paste0(cli::col_yellow(cli::symbol$info), " ") ), # we have enough color going on already, don't add color to `*` bullets ".bullets .bullet-*" = list( "text-exdent" = 2, before = function(x) paste0(cli::symbol$bullet, " ") ), # apply quotes to `.field` if we can't style it with color span.field = list(transform = single_quote_if_no_color) ) } single_quote_if_no_color <- function(x) quote_if_no_color(x, "'") quote_if_no_color <- function(x, quote = "'") { # copied from googledrive # TODO: if a better way appears in cli, use it # @gabor says: "if you want to have before and after for the no-color case # only, we can have a selector for that, such as: # span.field::no-color # (but, at the time I write this, cli does not support this yet) if (cli::num_ansi_colors() > 1) { x } else { paste0(quote, x, quote) } } # silence ----------------------------------------------------------------- #' Suppress usethis's messaging #' #' Execute a bit of code without usethis's normal messaging. #' #' @param code Code to execute with usual UI output silenced. #' #' @returns Whatever `code` returns. #' @export #' @examples #' # compare the messaging you see from this: #' browse_github("usethis") #' # vs. 
this: #' ui_silence( #' browse_github("usethis") #' ) ui_silence <- function(code) { withr::with_options(list(usethis.quiet = TRUE), code) } is_quiet <- function() { isTRUE(getOption("usethis.quiet", default = FALSE)) } # bullets, helpers, and friends ------------------------------------------------ ui_bullets <- function(text, .envir = parent.frame()) { if (is_quiet()) { return(invisible()) } cli::cli_div(theme = usethis_theme()) cli::cli_bullets(text, .envir = .envir) } ui_path_impl <- function(x, base = NULL) { is_directory <- is_dir(x) | grepl("/$", x) if (is.null(base)) { x <- proj_rel_path(x) } else if (!identical(base, NA)) { x <- path_rel(x, base) } # rationalize trailing slashes x <- path_tidy(x) x[is_directory] <- paste0(x[is_directory], "/") unclass(x) } # shorter form for compactness, because this is typical usage: # ui_bullets("blah blah {.path {pth(some_path)}}") pth <- ui_path_impl ui_code_snippet <- function(x, copy = rlang::is_interactive(), language = c("R", ""), interpolate = TRUE, .envir = parent.frame()) { language <- arg_match(language) indent <- function(x, first = " ", indent = first) { x <- gsub("\n", paste0("\n", indent), x) paste0(first, x) } x <- glue_collapse(x, "\n") if (interpolate) { x <- glue(x, .envir = .envir) # what about literal `{` or `}`? # use `interpolate = FALSE`, if appropriate # double them, i.e. `{{` or `}}` # open issue/PR about adding `.open` and `.close` } if (!is_quiet()) { # the inclusion of `.envir = .envir` leads to test failure # I'm consulting with Gabor on this # leaving it out seems fine for my use case # cli::cli_code(indent(x), language = language, .envir = .envir) cli::cli_code(indent(x), language = language) } if (copy && clipr::clipr_available()) { x_no_ansi <- cli::ansi_strip(x) clipr::write_clip(x_no_ansi) style_subtle <- cli::combine_ansi_styles( cli::make_ansi_style("grey"), cli::style_italic ) ui_bullets(c(" " = style_subtle("[Copied to clipboard]"))) } invisible(x) } # inspired by gargle::gargle_map_cli() and gargle::bulletize() usethis_map_cli <- function(x, ...) UseMethod("usethis_map_cli") #' @export usethis_map_cli.default <- function(x, ...) { ui_abort(c( "x" = "Don't know how to {.fun usethis_map_cli} an object of class {.obj_type_friendly {x}}." )) } #' @export usethis_map_cli.NULL <- function(x, ...) NULL #' @export usethis_map_cli.character <- function(x, template = "{.val <>}", .open = "<<", .close = ">>", ...) { as.character(glue(template, .open = .open, .close = .close)) } ui_pre_glue <- function(..., .envir = parent.frame()) { glue(..., .open = "<<", .close = ">>", .envir = .envir) } bulletize <- function(x, bullet = "*", n_show = 5, n_fudge = 2) { n <- length(x) n_show_actual <- compute_n_show(n, n_show, n_fudge) out <- utils::head(x, n_show_actual) n_not_shown <- n - n_show_actual out <- set_names(out, rep_along(out, bullet)) if (n_not_shown == 0) { out } else { c(out, " " = glue("{cli::symbol$ellipsis} and {n_not_shown} more")) } } # I don't want to do "... and x more" if x is silly, i.e. 
1 or 2 compute_n_show <- function(n, n_show_nominal = 5, n_fudge = 2) { if (n > n_show_nominal && n - n_show_nominal > n_fudge) { n_show_nominal } else { n } } kv_line <- function(key, value, .envir = parent.frame()) { cli::cli_div(theme = usethis_theme()) key_fmt <- cli::format_inline(key, .envir = .envir) # this must happen first, before `value` has been forced value_fmt <- cli::format_inline("{.val {value}}") # but we might actually want something other than value_fmt if (is.null(value)) { value <- ui_special() } if (inherits(value, "AsIs")) { value_fmt <- cli::format_inline(value, .envir = .envir) } ui_bullets(c("*" = "{key_fmt}: {value_fmt}")) } ui_special <- function(x = "unset") { force(x) I(glue("{cli::col_grey('<[x]>')}", .open = "[", .close = "]")) } # errors ----------------------------------------------------------------------- ui_abort <- function(message, ..., class = NULL, .envir = parent.frame()) { cli::cli_div(theme = usethis_theme()) nms <- names2(message) default_nms <- rep_along(message, "i") default_nms[1] <- "x" nms <- ifelse(nzchar(nms), nms, default_nms) names(message) <- nms cli::cli_abort( message, class = c(class, "usethis_error"), .envir = .envir, ... ) } # questions -------------------------------------------------------------------- ui_yep <- function(x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame()) { if (!is_interactive()) { ui_abort(c( "User input required, but session is not interactive.", "Query: {.val {x}}" )) } n_yes <- min(n_yes, length(yes)) n_no <- min(n_no, length(no)) qs <- c(sample(yes, n_yes), sample(no, n_no)) if (shuffle) { qs <- sample(qs) } cli::cli_inform(x, .envir = .envir) out <- utils::menu(qs) out != 0L && qs[[out]] %in% yes } ui_nah <- function(x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame()) { # TODO(jennybc): is this correct in the case of no selection / cancelling? !ui_yep( x = x, yes = yes, no = no, n_yes = n_yes, n_no = n_no, shuffle = shuffle, .envir = .envir ) } usethis/R/vignette.R0000644000176200001440000001266214721145632014123 0ustar liggesusers#' Create a vignette or article #' #' Creates a new vignette or article in `vignettes/`. Articles are a special #' type of vignette that appear on pkgdown websites, but are not included #' in the package itself (because they are added to `.Rbuildignore` #' automatically). #' #' @section General setup: #' * Adds needed packages to `DESCRIPTION`. #' * Adds `inst/doc` to `.gitignore` so built vignettes aren't tracked. #' * Adds `vignettes/*.html` and `vignettes/*.R` to `.gitignore` so #' you never accidentally track rendered vignettes. #' * For `*.qmd`, adds Quarto-related patterns to `.gitignore` and #' `.Rbuildignore`. #' @param name File name to use for new vignette. Should consist only of #' numbers, letters, `_` and `-`. Lower case is recommended. Can include the #' `".Rmd"` or `".qmd"` file extension, which also dictates whether to place #' an R Markdown or Quarto vignette. R Markdown (`".Rmd"`) is the current #' default, but it is anticipated that Quarto (`".qmd"`) will become the #' default in the future. #' @param title The title of the vignette. If not provided, a title is generated #' from `name`. 
#' @seealso #' * The [vignettes chapter](https://r-pkgs.org/vignettes.html) of #' [R Packages](https://r-pkgs.org) #' * The pkgdown vignette on Quarto: #' `vignette("quarto", package = "pkgdown")` #' * The quarto (as in the R package) vignette on HTML vignettes: #' `vignette("hello", package = "quarto")` #' @export #' @examples #' \dontrun{ #' use_vignette("how-to-do-stuff", "How to do stuff") #' use_vignette("r-markdown-is-classic.Rmd", "R Markdown is classic") #' use_vignette("quarto-is-cool.qmd", "Quarto is cool") #' } use_vignette <- function(name, title = NULL) { check_is_package("use_vignette()") check_required(name) maybe_name(title) ext <- get_vignette_extension(name) if (ext == "qmd") { check_installed("quarto") check_installed("pkgdown", version = "2.1.0") } name <- path_ext_remove(name) check_vignette_name(name) title <- title %||% name use_dependency("knitr", "Suggests") use_git_ignore("inst/doc") if (tolower(ext) == "rmd") { use_dependency("rmarkdown", "Suggests") proj_desc_field_update("VignetteBuilder", "knitr", overwrite = TRUE, append = TRUE) use_vignette_template("vignette.Rmd", name, title) } else { use_dependency("quarto", "Suggests") proj_desc_field_update("VignetteBuilder", "quarto", overwrite = TRUE, append = TRUE) use_vignette_template("vignette.qmd", name, title) } invisible() } #' @export #' @rdname use_vignette use_article <- function(name, title = NULL) { check_is_package("use_article()") check_required(name) maybe_name(title) ext <- get_vignette_extension(name) if (ext == "qmd") { check_installed("quarto") check_installed("pkgdown", version = "2.1.0") } name <- path_ext_remove(name) title <- title %||% name if (tolower(ext) == "rmd") { proj_desc_field_update("Config/Needs/website", "rmarkdown", overwrite = TRUE, append = TRUE) use_vignette_template("article.Rmd", name, title, subdir = "articles") } else { use_dependency("quarto", "Suggests") proj_desc_field_update("Config/Needs/website", "quarto", overwrite = TRUE, append = TRUE) use_vignette_template("article.qmd", name, title, subdir = "articles") } use_build_ignore("vignettes/articles") invisible() } use_vignette_template <- function(template, name, title, subdir = NULL) { check_name(template) check_name(name) check_name(title) maybe_name(subdir) ext <- get_vignette_extension(template) if (is.null(subdir)) { target_dir <- "vignettes" } else { target_dir <- path("vignettes", subdir) } use_directory(target_dir) use_git_ignore(c("*.html", "*.R"), directory = target_dir) if (ext == "qmd") { use_git_ignore("**/.quarto/") use_git_ignore("*_files", target_dir) use_build_ignore(path(target_dir, ".quarto")) use_build_ignore(path(target_dir, "*_files")) } path <- path(target_dir, asciify(name), ext = ext) data <- list( Package = project_name(), vignette_title = title, braced_vignette_title = glue("{{{title}}}") ) use_template(template, save_as = path, data = data, open = TRUE ) path } check_vignette_name <- function(name) { if (!valid_vignette_name(name)) { ui_abort(c( "{.val {name}} is not a valid filename for a vignette. It must:", "Start with a letter.", "Contain only letters, numbers, '_', and '-'." )) } } # https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Writing-package-vignettes # "To ensure that they can be accessed from a browser (as an HTML index is # provided), the file names should start with an ASCII letter and be comprised # entirely of ASCII letters or digits or hyphen or underscore." 
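# For example, "getting-started" and "intro_v2" satisfy this rule, while
# "2fast2furious" (starts with a digit) and "my vignette" (contains a space)
# do not.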
valid_vignette_name <- function(x) { grepl("^[[:alpha:]][[:alnum:]_-]*$", x) } check_vignette_extension <- function(ext) { # Quietly accept "rmd" here, tho we'll always write ".Rmd" in such a filepath if (! ext %in% c("Rmd", "rmd", "qmd")) { valid_exts_cli <- cli::cli_vec( c("Rmd", "qmd"), style = list("vec-sep2" = " or ") ) ui_abort(c( "Unsupported file extension: {.val {ext}}", "usethis can only create a vignette or article with one of these extensions: {.val {valid_exts_cli}}." )) } } get_vignette_extension <- function(name) { ext <- path_ext(name) if (nzchar(ext)) { check_vignette_extension(ext) } else { ext <- "Rmd" } ext } usethis/NAMESPACE0000644000176200001440000001164714717524721013200 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method(format,github_remote_config) S3method(print,github_remote_config) S3method(print,sitrep) S3method(usethis_map_cli,"NULL") S3method(usethis_map_cli,character) S3method(usethis_map_cli,default) export(browse_circleci) export(browse_cran) export(browse_github) export(browse_github_actions) export(browse_github_issues) export(browse_github_pulls) export(browse_package) export(browse_project) export(create_download_url) export(create_from_github) export(create_github_token) export(create_package) export(create_project) export(create_tidy_package) export(edit_file) export(edit_git_config) export(edit_git_ignore) export(edit_pkgdown_config) export(edit_r_buildignore) export(edit_r_environ) export(edit_r_makevars) export(edit_r_profile) export(edit_rstudio_prefs) export(edit_rstudio_snippets) export(edit_template) export(gh_token_help) export(git_branch_default) export(git_default_branch) export(git_default_branch_configure) export(git_default_branch_rediscover) export(git_default_branch_rename) export(git_protocol) export(git_remotes) export(git_sitrep) export(git_vaccinate) export(issue_close_community) export(issue_reprex_needed) export(local_project) export(pr_fetch) export(pr_finish) export(pr_forget) export(pr_init) export(pr_merge_main) export(pr_pause) export(pr_pull) export(pr_push) export(pr_resume) export(pr_view) export(proj_activate) export(proj_get) export(proj_path) export(proj_set) export(proj_sitrep) export(rename_files) export(tidy_label_colours) export(tidy_label_descriptions) export(tidy_labels) export(tidy_labels_rename) export(ui_code) export(ui_code_block) export(ui_done) export(ui_field) export(ui_info) export(ui_line) export(ui_nope) export(ui_oops) export(ui_path) export(ui_silence) export(ui_stop) export(ui_todo) export(ui_unset) export(ui_value) export(ui_warn) export(ui_yeah) export(use_addin) export(use_agpl3_license) export(use_agpl_license) export(use_apache_license) export(use_apl2_license) export(use_article) export(use_author) export(use_badge) export(use_binder_badge) export(use_bioc_badge) export(use_blank_slate) export(use_build_ignore) export(use_c) export(use_cc0_license) export(use_ccby_license) export(use_circleci) export(use_circleci_badge) export(use_citation) export(use_code_of_conduct) export(use_conflicted) export(use_course) export(use_coverage) export(use_covr_ignore) export(use_cpp11) export(use_cran_badge) export(use_cran_comments) export(use_data) export(use_data_raw) export(use_data_table) export(use_description) export(use_description_defaults) export(use_dev_package) export(use_dev_version) export(use_devtools) export(use_directory) export(use_git) export(use_git_config) export(use_git_hook) export(use_git_ignore) export(use_git_protocol) export(use_git_remote) export(use_github) 
export(use_github_action) export(use_github_action_check_full) export(use_github_action_check_release) export(use_github_action_check_standard) export(use_github_action_pr_commands) export(use_github_actions) export(use_github_actions_badge) export(use_github_file) export(use_github_labels) export(use_github_links) export(use_github_pages) export(use_github_release) export(use_gitlab_ci) export(use_gpl3_license) export(use_gpl_license) export(use_import_from) export(use_jenkins) export(use_latest_dependencies) export(use_lgpl_license) export(use_lifecycle) export(use_lifecycle_badge) export(use_logo) export(use_make) export(use_mit_license) export(use_namespace) export(use_news_md) export(use_package) export(use_package_doc) export(use_partial_warnings) export(use_pipe) export(use_pkgdown) export(use_pkgdown_github_pages) export(use_posit_cloud_badge) export(use_proprietary_license) export(use_r) export(use_rcpp) export(use_rcpp_armadillo) export(use_rcpp_eigen) export(use_readme_md) export(use_readme_rmd) export(use_release_issue) export(use_reprex) export(use_revdep) export(use_rmarkdown_template) export(use_roxygen_md) export(use_rscloud_badge) export(use_rstudio) export(use_rstudio_preferences) export(use_spell_check) export(use_standalone) export(use_template) export(use_test) export(use_test_helper) export(use_testthat) export(use_tibble) export(use_tidy_coc) export(use_tidy_contributing) export(use_tidy_dependencies) export(use_tidy_description) export(use_tidy_eval) export(use_tidy_github) export(use_tidy_github_actions) export(use_tidy_github_labels) export(use_tidy_issue_template) export(use_tidy_logo) export(use_tidy_style) export(use_tidy_support) export(use_tidy_thanks) export(use_tidy_upkeep_issue) export(use_tutorial) export(use_upkeep_issue) export(use_usethis) export(use_version) export(use_vignette) export(use_zip) export(with_project) export(write_over) export(write_union) import(fs) import(rlang) importFrom(glue,glue) importFrom(glue,glue_collapse) importFrom(glue,glue_data) importFrom(lifecycle,deprecated) importFrom(purrr,map) importFrom(purrr,map_chr) importFrom(purrr,map_int) importFrom(purrr,map_lgl) importFrom(utils,available.packages) usethis/LICENSE0000644000176200001440000000005514706002662012746 0ustar liggesusersYEAR: 2020 COPYRIGHT HOLDER: usethis authors usethis/NEWS.md0000644000176200001440000021003014721173047013036 0ustar liggesusers# usethis 3.1.0 * `use_vignette()` and `use_article()` support Quarto. The `name` of the new vignette or article can optionally include a file extension to signal whether `.Rmd` or `.qmd` is desired, with `.Rmd` remaining the default for now. Thanks to @olivroy for getting the ball rolling (#1997). * `use_data()` defaults to serialization version 3 (@laurabrianna, #2044). * `use_package()` can lower a minimum version requirement (@jplecavalier, #1957). * `use_release_issue()` only suggests doing reverse dependency checks if there are, in fact, reverse dependencies (#1817, @seankross). * `use_tidy_upkeep_issue()` records the year it is being run in the `Config/usethis/upkeep` field in DESCRIPTION. If this value exists, it is used to filter the checklist when making the issue. # usethis 3.0.0 ## Transition to cli package for UI * The `ui_*()` functions have been marked as [superseded](https://lifecycle.r-lib.org/articles/stages.html#superseded). External users of these functions are encouraged to use the [cli package](https://cli.r-lib.org/) instead. 
The cli package did not have the required functionality when the `usethis::ui_*()` functions were first created, but it does now and it's the superior option. There is a cli vignette about how to make this transition: `vignette("usethis-ui", package = "cli")`. usethis no longer uses the `ui_*()` functions internally, in favor of new cli-based helpers that are not exported. ## Deprecated function and argument removal We are removing functions and arguments that were deprecated as of usethis v2.0.0, which was released in December 2020. These changes have been in place for a long time now: * Switch from git2r to gert (+ credentials). * Use of git config and the gh package to infer, e.g., the target repo spec. * Pivot towards GitHub Actions and away from Travis and AppVeyor. Functions that are removed and, where applicable, what to use instead: * `git_credentials()` * `use_git_credentials()` * `browse_github_token()` (do `create_github_token()`) * `browse_github_pat()` (do `create_github_token()`) * `github_token()` (do `gh_token_help()` or `gh::gh_token()`) * `pr_pull_upstream()` (do `pr_merge_main()`) * `pr_sync()` (do `pr_merge_main(); pr_push()`) * `use_appveyor()` * `use_appveyor_badge()` * `use_travis()` * `use_travis_badge()` * `browse_travis()` * `use_pkgdown_travis()` * `use_tidy_ci()` *deprecated in v2.1.0* (do `use_tidy_github_actions()`) * `use_tidy_labels()` *deprecated in v2.1.0* (do `use_tidy_github_labels()`) Function arguments that are removed: * `create_from_github(auth_token =, credentials =)` * `use_github(auth_token =, credentials =)` * `use_github_labels(repo_spec =, host =, auth_token =)` * `use_github_links(auth_token =, host =)` * `use_github_release(host =, auth_token =)` ## Other changes * `use_zip()` and `use_course()` are equipped to handle a ZIP where the parent folder is implicit (@burnsal, #1961). * `use_test_helper()` is a new function to create a test helper file (@olivroy, #1822). * `use_cpp11()` makes it easier to update `NAMESPACE` (@pachadotdev, #1921). * `pr_merge_main()` now offers the choice to not open the files with merge conflicts (@olivroy, #1720). * `edit_rstudio_snippets()` now accepts yaml snippets (@olivroy, #1941). * `use_standalone()` inserts an improved header that includes the code needed to update the standalone file (@krlmlr, #1903). * `use_release_issue()` and `use_upkeep_issue()` behave better when the user has a fork. The user is asked just once to choose between `origin` and `upstream` as the target repo (#2023). * The README templates now recommend [pak](https://pak.r-lib.org) instead of devtools for package installation (@olivroy, #1723). * `use_github()` now knows that you can reuse the name of an earlier repo that has since been renamed (@ateucher, #1893). * `use_git()` no longer asks if you want to restart RStudio when using Positron. * `use_test()` and `use_r()` now work when you are in `tests/testthat/_snaps/{foo}.md` (@olivroy, #1988). * The URLs baked into the badge generated by `use_coverage(type = "codecov")` are updated and no longer specify a branch(#2008). * `usethis::use_version()` now tolerates empty lines preceding the first section title in the package NEWS file. (#1976) # usethis 2.2.3 * Patch release with changes to `.Rd` files requested by CRAN. # usethis 2.2.2 * Implicit usage of `numeric_version()` via comparison now always provides character input. This is in response to a request from CRAN to anticipate future solutions to . 
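  A minimal sketch of that pattern (an illustration only, not code taken from usethis itself):

  ```r
  # Supply character input to version comparisons instead of a bare number,
  # which would rely on implicit as.character() coercion.
  numeric_version("1.2.3") >= "1.2.0"   # character input
  # numeric_version("1.2.3") >= 1.2     # numeric input: the form being phased out
  ```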
# usethis 2.2.1 * Internal helper `cran_version()`, used in functions such as `use_release_checklist()` and `use_news_md()`, is more resilient to situations where no CRAN mirror has been set (#1857). * Internal usage of `numeric_version()` now always provides character input, rather than relying on implicit `as.character()` coercion. This is in response to a request from CRAN to anticipate future solutions to (#1869). # usethis 2.2.0 ## New functions * `use_author()` is a new function to introduce a new person into the `Authors@R` field of DESCRIPTION (@avalcarcel9, #833). * `use_rstudio_preferences()` lets you set RStudio preferences programmatically (#1518) * `use_standalone()` is a new function that makes it easier to use standalone files provided by various low-level tidyverse packages, like rlang (#1654). * `use_upkeep_issue()` is a new function to facilitate regular maintenance of your package. Similar to `use_release_issue()`, it opens an issue in your repo with a checklist of maintenance tasks. It will include additional bullets if your package includes an `upkeep_bullets()` function that returns a character vector (#1794). ## Package development * Although nested projects are discouraged, they can be useful in development contexts. `create_package()` now sets the correct package name and returns the correct package path for a package nested inside a project (#1647). * `use_article()` no longer adds the rmarkdown package to `Suggests`. Instead, if rmarkdown is not already a dependency, it's added to `Config/Needs/website`. This means that a package that only uses articles (vs. vignettes) won't gain an unnecessary dependency on rmarkdown (#1700). * `use_data()` now sets the appropriate minimal R version in `DESCRIPTION`, depending on which serialization format `version` you choose (@dpprdan, #1672). * `use_github_links()` by default now appends the GitHub url to existing urls in in the `URL` field of DESCRIPTION, rather than replacing existing urls (#1805). * `use_latest_dependencies()` no longer affects `Suggests` since those dependencies are not enforced (#1749). * `use_news_md()` now places "(development version)" in the header of `NEWS.md` if there is a development version number in `DESCRIPTION`. It also sets the first bullet to "Initial CRAN submission" when it looks like a "new" package (#1708). * `use_coverage()` no longer adds covr to `Suggests`, since the `test-coverage` GitHub Actions workflow takes care of installing covr (@Bisaloo, #1851). ## Package release * `use_release_issue()` will now remind you to run `use_github_links()` if necessary (@Bisaloo, #1754) * `use_release_issue()` now encourages the creation of `NEWS.md` prior to submission, instead of after (#1755). * `use_github_release()` now automatically pushes to GitHub (if safe) and automatically publishes the release, rather than requiring you to edit and publish the draft (#1385). * `use_github_release()` no longer fails in the absence of `NEWS.md` (#1755). * `use_release_issue()` will now remind you to check/close the milestone corresponding to the release, if it exists (#1642). * `use_version()` and `use_dev_version()` gain a `push` argument to optionally push the result after committing. This is used to eliminate a manual step from the `use_release_issue()` checklist (#1385). * `use_revdep()` no longer places an email template, because these days we are more likely to communicate with other maintainers about breaking changes via GitHub issues and pull requests (#1769). 
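A small usage sketch of the new `push` argument on `use_version()` mentioned above (illustrative only, run inside a package project):

```r
# Bump the patch version, commit the result, and push in one step
usethis::use_version("patch", push = TRUE)
```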
## Package file management * `rename_files()` now also affects files in `src/` (#1585). * `use_r()` and `use_test()` now work with all active files in `R/`, `src/`, and `tests/testthat/` (#1566). * `use_r()` and `use_test()` now work with files containing `.` (#1690). * `use_rcpp()`, `use_c()` and friends now work the same way as `use_r()` and `use_test()`: they'll take the default file name from the file you currently have open in RStudio (#1730). ## Git and GitHub * `create_from_github()` will now use an existing `.Rproj` file if it exists anywhere in the repo, not just the root directory. This is useful if you're working with repos that contain tools for multiple languages (#1680). * `git_sitrep()` gains two arguments: `tool` and `scope`, which enables you to limit the report to, for example, `tool = "git"` or `scope = "user"`. The default remains to provide a full report. Also, provides more feedback if git user's information is not set, and checks global git-email against user-level GitHub PAT (@ijlyttle, #1732, #1714, #1706). * `git_vaccinated()` now treats a path configured as `core.excludesFile` like other user-supplied paths; in particular, any use of the `~/` home directory shortcut is expanded via [`fs::path_expand()`](https://fs.r-lib.org/reference/path_expand.html) (@dpprdan, #1560). * `use_github_action()` now suggests possible actions when called without arguments (#1724). * `use_github_actions()`, `use_github_action_check_standard()`, `use_github_action_check_release()`, and `use_github_action_pr_commands()` have been deprecated in favour of the new interactive powers of `use_github_action()` (#1724). ## Minor improvements and fixes * Links to the R Packages book have been updated to the second edition of the book (#1689). * The SVG badges placed by `use_lifecycle()` have improved accessibility features, i.e. they advertise the lifecycle stage via the `aria-label` attribute (#1554, https://github.com/r-lib/lifecycle/issues/117). * `use_rscloud_badge()` has been deprecated in favour of `use_posit_cloud_badge()`, and both functions now accept the updated url format of Posit Cloud projects (#1670). * `use_rstudio()` gains a `reformat` argument which omits `.Rproj` settings that enforce file formatting conventions, e.g. around whitespace. `create_from_github()` uses this option when it introduces an `.Rproj` to a project that lacks one, making it easier to follow the project's existing conventions (#1679). * `write_over()` and `use_github_file()` gain an overwrite argument (#1748). ## Tidyverse-related * `use_release_issue()` now uses internal `release_extra_revdeps()` to add extra revdep sources. Currently only use for internal Posit tooling, but we hope to extend to all users in the future (#1610). * `use_tidy_logo()` is a new function that calls `use_logo()` on the appropriate hex sticker PNG file at (#1871). ## Deprecated functions * `use_tidy_eval()` is now deprecated because it imports and re-exports a large number of functions that are no longer needed in order to do tidy evaluation (#1656). * `use_travis()`, `use_pkgdown_travis()`, `browse_travis()`, and `use_appveyor()` are now deprecated because we no longer recommend Travis or Appveyor. We recommend GitHub actions instead (#1517). # usethis 2.1.6 ### GitHub-related * `use_github_action()` and friends gain a `ref` argument, which defaults to the tag of the latest release in (#1541). * `use_github_actions_badge()` now uses the same URLs as GitHub does via the "Create status badge" helper in the browser (#1525). 
This changes the significance of the `name` argument; now it really must be the name of the workflow configuration file. * All functions error more clearly when the requested operation is not supported for the "theirs" remote configuration (#1588). ### Other changes * `use_roxygen_md()` gains an `overwrite` argument (#1599). * `use_rscloud_badge()` is a new function that creates a README badge indicating the repository can be launched in an [RStudio Cloud](https://rstudio.cloud) project (@gvelasq, #1584). * `use_data()` gains an `ascii` argument, which is passed along to `save()` (@JosiahParry, #1625). * `use_code_of_conduct()` has been updated to version 2.1 of the Contributor Covenant (@batpigandme, #1591). # usethis 2.1.5 * pkgdown-related functions no longer automatically strip a trailing slash from the pkgdown site URL, in order to play more nicely with CRAN's URL checks (#1526). * `edit_pkgdown_config()` is a new function that opens the pkgdown YAML configuration file for the current Project, if such a file exists. * The error thrown when reporting an unsupported GitHub configuration has been fixed for forward compatibility with a future version of rlang, i.e. what is anticipated to be rlang v1.0.0. * Version 2.1.4 was never released. Version was advanced from 2.1.4 to 2.1.5 strictly for CRAN (re-)submission purposes. # usethis 2.1.3 * Modified a test to ensure that intermittent GitHub rate limiting does not lead to ungraceful failure on CRAN. # usethis 2.1.2 * `git_default_branch_rename()` no longer errors on repos where README exists, but has no badge block. * `git_default_branch_rediscover()` prunes the defunct remote ref to the old default branch, e.g. `origin/master`. * Version 2.1.1 was never released. Version was advanced from 2.1.1 to 2.1.2 strictly for CRAN (re-)submission purposes. # usethis 2.1.0 ## Git default branch support usethis has a more sophisticated understanding of the default branch and gains several functions to support default branch renaming. * `git_branch_default()` has been renamed to `git_default_branch()`, to place it logically in the new family of functions. The old name still works, but that won't be true forever. * `git_default_branch()` is much more diligent about figuring out the default branch. Instead of only consulting the local repo, now we integrate local info with the default branch reported by the `upstream` or `origin` remote, if applicable. - This is intended to surface the case where a project has renamed its default branch and the local repo needs sync up with that. * `git_default_branch_rediscover()` is a new function that helps contributors update their local repo (and personal fork, if applicable) when a project/repo renames its default branch. * `git_default_branch_rename()` is a new function that helps a repo owner rename the default branch (both on GitHub and locally). * `git_default_branch_configure()` is a new function to set the new Git configuration option `init.defaultBranch`, which controls the name of the initial branch of new local repos. * `git_sitrep()` exposes `init.defaultBranch` and surfaces the more sophisticated analysis of `git_default_branch()`. ## Other GitHub-related changes * `git_sitrep()` and `gh_token_help()` try even harder to help people get on the happy path with respect to their GitHub PAT (#1400, #1413, #1488, #1489, #1497). * The minimum version of gh has been bumped to help / force more people to upgrade to the gh version that supports current GitHub PAT formats (@ijlyttle, #1454). 
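  If you want to confirm that a PAT in the new format is being picked up after upgrading gh, a small hedged check (these helpers only report on your setup, they do not change anything):

  ```r
  gh::gh_token()            # the token gh (and therefore usethis) will use
  usethis::gh_token_help()  # usethis's own advice about PAT setup
  usethis::git_sitrep()     # full Git/GitHub situation report, including PAT checks
  ```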
* `use_github_file()` is a new function related to `use_template()`. Instead of starting from a local file, `use_github_file()` grabs the contents of an arbitrary file on GitHub that the user has permission to read. It supports targeting a specific branch, tag, or commit and can follow a symlink (#1407). `use_github_file()` now powers `use_github_action()` and friends. * `use_github_release()` is much more diligent about using any information left behind by `devtools::submit_cran()` or `devtools::release()`. Specifically, this applies to determining which SHA is to be tagged in the release. And this SHA, in turn, determines the consulted versions of DESCRIPTION (for package version) and NEWS.md (for release notes) (#1380). * `use_release_issue()` also takes bullets from `release_questions()`, for compatibility with `devtools::release()`. * `git_vaccinate()`, `edit_git_ignore()`, and `git_sitrep()` are more careful to consult, reveal, and set the `core.excludesFile` setting in user's Git configuration (#1461). * `use_github_action_check_full()` has been removed. It's overkill for the majority of R packages, which are better off with `use_github_actions()` or `use_github_action_check_standard()` (#1490). * `use_github_pages()` and `use_pkgdown_github_pages()` use a new method for creating an empty, orphan `gh-pages` branch. This is necessary due to new GitHub behaviour, where it has become essentially impossible to refer to the empty tree (#1472). * `use_github()` can create repositories with `"internal"` visibility, a feature that exists within GitHub Enterprise products (#1505). ## Package development * `use_readme_rmd()` and `use_readme_md()` no longer include CRAN installation instructions in the initial template; instead, we only include GitHub-based install instructions or otherwise prompt the user to update instructions (#1507). * `use_import_from()` is a new function that puts `@importFrom pkg fun` directives into a package in a consistent location (@malcolmbarrett, #1377). * `DESCRIPTION` files generated by usethis no longer include `LazyData` by default, as per new CRAN checks; instead, `LazyData` is now added the first time you use `use_data()` (@malcolmbarrett, #1404). * `use_tidy_eval()` has been updated to reflect current recommendations for using (and therefore exposing) tidy eval in other packages (@lionel-, #1445). * `use_pkgdown()` automatically uses Bootstrap 5 if the pkgdown version supports it (anticipated for pkgdown 2.0.0). * `use_lifecycle()` now imports `lifecycle::deprecated()` (#1419). * `use_code_of_conduct()` now requires a `contact` argument to supply contact details for reporting CoC violations (#1269). * `use_package()` no longer guides the user on how to use a dependency when no change was made (@malcolmbarrett, #1384). ### Aimed at the tidyverse team These functions are exported for anyone to use, but are aimed primarily at the maintainers of tidyverse, r-lib, and tidymodels packages. * `use_tidy_dependencies()` is a new function that sets up standard dependencies used by all tidyverse packages, except those that are designed to be dependency free (#1423). * `use_tidy_upkeep_issue()` is a new function similar to `use_release_issue()` that creates a checklist-style issue to prompt various updates (#1416). * `use_tidy_release_test_env()` has been deleted since we no longer recommend including test environments in `cran-comments.md`. There's no evidence that CRAN finds it useful, and it's annoying to keep up-to-date (#1365). 
* `use_tidy_github_labels()` is the new name for `use_tidy_labels()` (#1430). * `use_tidy_github_actions()` takes over for `use_tidy_ci()`, which is now deprecated. ## User-level configuration * `"usethis.overwrite"` is a new option. When set to `TRUE`, usethis overwrites an existing file without asking for user confirmation if the file is inside a Git repo. The normal Git workflow makes it easy to see and selectively accept/discard any proposed changes. This behaviour is strictly opt-in (#1424). * Functions that provide code to load packages in your `.Rprofile` now use `rlang::check_installed()` to make sure the package is installed locally (@malcolmbarrett, #1398). * `edit_rstudio_prefs()` and `edit_rstudio_snippets()` should work now on case-sensitive OSes, due to a path fix re: the location of RStudio's config files (@charliejhadley, #1420). # usethis 2.0.1 * All functions that require a package now ask you if you'd like to install it. * Added `edit_template()` for opening and creating files in `inst/templates` (for use with `use_template()`) (@malcolmbarrett, #1319). * `use_article()` now creates the file in the `vignettes/articles/` (#548). * `use_lifecycle()` has been updated for changes in our lifecycle workflow (#1323). * `use_tidy_pkgdown()` has been renamed to `use_pkgdown_github_pages()` since the function is useful for anyone who wants to automatically publish to GitHub pages, not just the tidyverse team (#1308). * `use_release_issue()` includes a bunch of minor improvements. Most importantly, for initial CRAN release we now include a number of common things that CRAN checks for that aren't in `R CMD check`. * `use_readme_rmd()`, `use_readme_md()`, `use_tidy_contributing()`, and `use_tidy_support()` use updated logic for determining the `OWNER/REPO` spec of the target repo (#1312). # usethis 2.0.0 ## Adoption of gert and changes to Git/GitHub credential handling Usethis has various functions that help with Git-related tasks, which break down into two categories: 1. Git tasks, such as clone, push, and pull. These are things you could do with command line Git. 1. GitHub tasks, such as fork, release, and open an issue or pull request. These are things you could do in the browser or with the GitHub API. We've switched from git2r to the gert package for Git operations (). We continue to use the gh package for GitHub API work (). The big news in this area is that these lower-level dependencies are getting better at finding Git credentials, finding the same credentials as command line Git (and, therefore, the same as RStudio), and finding the same credentials as each other. This allows usethis to shed some of the workarounds we have needed in the past, to serve as a remedial "credential valet". Under the hood, both gert and gh are now consulting your local Git credential store, when they need credentials. At the time of writing, they are using two different even-lower-level packages to do this: * gert uses the credentials package () * gh uses the gitcreds package () Even now, gert and gh should discover the same credentials, at least for github.com. In the future, these two packages may merge into one. Git/GitHub credential management is covered in a new article: [Managing Git(Hub) Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html) The main user-facing changes in usethis are: * usethis should discover and use the same credentials as command line Git. * usethis should be able to work with any GitHub deployment. 
While github.com is the default, GitHub Enterprise deployments are fully supported. The target GitHub host is determined from the current project's configured GitHub remotes, whenever possible. As a result, several functions are deprecated and several other functions have some deprecated arguments. * Deprecated functions: - `use_git_credentials()` - `git_credentials()` - `github_token()` * Functions with (deprecated arguments): - `create_from_github()` (`auth_token`, `credentials`) - `use_github()` (`auth_token`, `credentials`) - `use_github_links()` (`host`, `auth_token`) - `use_github_labels()` (`repo_spec`, `host`, `auth_token`) - `use_tidy_labels()` (`repo_spec`, `host`, `auth_token`) - `use_github_release()` (`host`, `auth_token`) The switch to gert + credentials should eliminate most credential-finding fiascos. Gert also takes a different approach to wrapping libgit2, the underlying C library that does Git operations. The result is more consistent support for SSH and TLS, across all operating systems, without requiring special effort at install time. More users should enjoy Git remote operations that "just work", for both SSH and HTTPS remotes. There should be fewer "unsupported protocol" errors. ## GitHub remote configuration Usethis gains a more formal framework for characterizing a GitHub remote configuration. We look at: * Which GitHub repositories `origin` and `upstream` point to * Whether you can push to them * How they relate to each other, e.g. fork-parent relationship This is an internal matter, but users will notice that usethis is more clear about which configurations are supported by various functions and which are not. The most common configurations are reviewed in a [section of Happy Git](https://happygitwithr.com/common-remote-setups.html). When working in a fork, there is sometimes a question whether to target the fork or its parent repository. For example, `use_github_links()` adds GitHub links to the URL and BugReports fields of DESCRIPTION. If someone calls `use_github_links()` when working in a fork, they probably want those links to refer to the *parent* or *source* repo, not to their fork, because the user is probably preparing a pull request. Usethis should now have better default behaviour in these situations and, in some cases, will present an interactive choice. ## Default branch There is increasing interest in making the name of a repo's default branch configurable. Specifically, `main` is emerging as a popular alternative to `master`. Usethis now discovers the current repo's default branch and uses that everywhere that, previously, we had hard-wired `master`. `git_branch_default()` is a newly exported function that is also what's used internally. `use_course()`, `use_zip()`, and `create_download_url()` all have some support for forming the URL to download a `.zip` archive of a repo, based on a repo specification (e.g. `OWNER/REPO`) or a browser URL. These helpers now form a URL that targets `HEAD` of the repo, i.e. the default branch. ## Changes to Git/GitHub functionality The default Git protocol is now "https" and we no longer provide an interactive choice, by default, in interactive sessions. As always, a user can express a preference for "ssh" in individual function calls, for an R session via `use_git_protocol()`, and for all R sessions via the `usethis.protocol` option (#1262). `pr_resume()` is a new function for resuming work on an existing local PR branch. It can be called argument-less, to select a branch interactively. 
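A hedged sketch of a typical contributor flow with these helpers (the branch name is a placeholder; each call is interactive):

```r
library(usethis)

pr_init(branch = "fix-typo")  # create and switch to a local PR branch
# ... edit files and commit ...
pr_push()                     # push the branch and open a pull request in the browser
pr_resume()                   # later, pick an existing local PR branch back up (interactive if no argument)
pr_finish()                   # once merged, clean up local (and, where possible, remote) branches
```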
`pr_fetch()` can also be called with no arguments, to select a PR interactively. The `owner` argument is replaced by `target`, with a choice of the source (default) or primary repo. `pr_forget()` is a new function for abandoning a PR you initiated locally or fetched from GitHub. It only does local clean up and, for example, doesn't delete a remote branch or close a PR (#1263). `pr_view()` can now be called with no arguments. If the current branch is associated with an open PR, we target that and, otherwise, we offer an interactive selection. `pr_finish()` deletes the remote PR branch if the PR has been merged and the current user has the power to do so, i.e. an external contributor deleting their own branch or a maintainer deleting a branch associated with an internal PR (#1150). It no longer errors if the PR branch has already been deleted (#1196). `pr_pull_upstream()` is renamed to `pr_merge_main()` to emphasize that it merges the **main** line of development into the current branch, where the main line of development is taken to mean the default branch, as reported by `git_branch_default()`, of the source repo, which could be either `upstream` or `origin`, depending on the situation. `create_from_github()` will only create a read-only clone, due to lack of a GitHub personal access token, if explicitly directed to do so via `fork = FALSE`. `create_from_github()` and `use_tidy_thanks()` accept browser and Git URLs as the `repo_spec` argument, to be friendlier to copy/paste. When a URL is passed, the `host` is also extracted from it. `create_github_token()` is a new name for the function previously known as `browse_github_token()` and `browse_github_pat()`. `issue_close_community()` and `issue_reprex_needed()` are two new functions for maintainers who process lots of GitHub issues. They automate canned replies and actions, e.g. labelling or closing (#940). GitHub Actions is the preferred platform for continuous integration, because that is what the tidyverse team currently uses and maintains. Functions related to Travis-CI and AppVeyor are soft-deprecated to raise awareness about this change and to make it clear that, if substantial maintenance becomes necessary, we may elect to retire the function (#1169). `browse_github_actions()` is a new function to open the Actions page of the respective repo on GitHub, similar to existing `browse_*()` functions (@pat-s, #1102). `use_github_pages()` is a new function to activate or reconfigure the GitHub Pages site associated with a repository (#224). `use_tidy_pkgdown()` implements the complete pkgdown configuration used by the tidyverse team (#224). `pr_sync()` is deprecated and can be replicated by calling `pr_pull()`, `pr_merge_main()`, then `pr_push()`. ## Licensing improvements All `use_*_license()` functions now work for projects, not just packages. `use_apl2_license()` (not `use_apache_license()`) and `use_gpl3_license()` no longer modify the license text (#1198). `use_mit_license()` now sets the default copyright holder to "{package} authors". This makes it more clear that the copyright holders are the contributors to the package; unless you are using a CLA there is no one copyright holder of a package (#1207). New `use_gpl_license()` and `use_agpl_license()` make it easier to pick specific versions of the GPL and AGPL licenses, and to choose whether or not you include future versions of the license. Both default to version 3 (and above). 
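For example, a short sketch of the updated license helpers (pick one per package; the copyright holder shown is a placeholder):

```r
library(usethis)

use_mit_license()                                    # copyright holder defaults to "<package> authors"
use_mit_license(copyright_holder = "Jane Doe")       # or name one explicitly
use_gpl_license(version = 3, include_future = TRUE)  # GPL version 3 or later
use_agpl_license(version = 3)                        # AGPL, same interface
```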
New `use_proprietary_license()` allows your package to pass R CMD check while making it clear that your code is not open source (#1163). Thanks to @atheriel for the blog post suggesting the wording: https://unconj.ca/blog/copyright-in-closed-source-r-packages-the-right-way.html `use_lgpl_license()` now uses version 3 (and above), and gains new `version` and `include_future` argument to control which version is used. `use_gpl3_license()`, `use_agpl3_license()` and `use_apl2_license()` have been deprecated in favour of the new `version` argument to `use_gpl_license()`, `use_agpl_license()` and `use_apache_license()`. The `name` argument to `use_mit_license()` has been changed to `copyright_holder` to make the purpose more clear. The `name` argument has been removed from all other license functions because it is not needed; no other license makes an assertion about who the copyright holder is. ## RStudio preferences usethis is now fully cognizant of the [changes to RStudio preferences](https://posit.co/blog/rstudio-1-3-preview-configuration/) in RStudio 1.3: `edit_rstudio_snippets()` looks in the new location, and if you have snippets in the old location, will automatically copy them to the new location (#1204) New `edit_rstudio_prefs()` opens RStudio preferences file for editing (#1148). `use_blank_slate()` can now configure your global, i.e. user-level, RStudio preference, in addition to project-level (#1018). ## Other changes `browse_package()` and `browse_project()` are new functions that let the user choose from a list of URLs derived from local Git remotes and DESCRIPTION (local or possibly on CRAN) (#1113). The legacy `"devtools.desc"` option is no longer consulted when populating a new DESCRIPTION file. You must use the `"usethis.description"` now (#1069). `use_dev_package()` gains a `remote` parameter to allow you to specify the remote. The existing behaviour, which adds an `OWNER/REPO` GitHub remote, remains the default (#918, @ijlyttle). `use_cpp11()` is a new function to set up an R package to use cpp11. `create_package(roxygen = FALSE)` once again writes a valid NAMESPACE file (and also has no Roxygen* fields in DESCRIPTION) (#1120). `create_package()`, `create_project()`, `create_from_github()`, and `proj_activate()` work better with relative paths, inside and outside of RStudio (#1122, #954). `use_testthat()` gains an edition argument to support testthat v3.0.0 (#1185) `use_version()` now updates `src/version.c` if it exists and contains a line matching `PKG_version = "x.y.z";`. usethis has been re-licensed as MIT (#1252, #1253). ## Dependency changes New Imports: gert, jsonlite (was already an indirect dependency), lifecycle, rappdirs No longer in Imports: git2r, rematch2 # usethis 1.6.3 Patch release to refactor usage of withr in the tests for forward compatibility with an upcoming withr release. All changes are within the usethis tests. # usethis 1.6.1 Patch release to align some path handling internals with an update coming in the fs package. * `use_github_links()` is a bit more clever about remotes (e.g. `origin` vs. `upstream`), which makes it easier to make a PR that adds GitHub links for a package you've forked. * `use_pkgdown()` now `.gitignore`s the destination directory and only adds the destination directory to the config file if it departs from the default (which is `docs/`). * `use_tidy_ci()` is now deprecated in favour of `use_tidy_github_actions()` (#1098). 
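  In practice the swap is a single call (a sketch; `use_tidy_github_actions()` sets up the full tidyverse GitHub Actions configuration):

  ```r
  # usethis::use_tidy_ci()            # deprecated, Travis-based
  usethis::use_tidy_github_actions()  # current replacement, GitHub Actions based
  ```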
* `use_github_action_check_standard()` is a new intermediate workflow that checks on more platforms than `_release`, but is less exhaustive than `_full` (@jimhester). * `create_tidy_package()` now uses an MIT license (@topepo, #1096). # usethis 1.6.0 ## GitHub actions * New `use_github_actions()`, `use_github_action_check_release()`, `use_github_action_check_full()`, `use_github_action_pr_commands()`, to set up GitHub Actions for a package (@jimhester). * We now recommend GitHub Actions instead of Travis-CI or AppVeyor, and strongly recommend upgrading your packages. * Fix `use_github_action()` URL parameter to ensure custom URLs are allowed. (@coatless, #1065). ## Package creation * `create_package()` gains a `roxygen` argument. If `TRUE` (the default), it adds a `RoxygenNote` field to the `DESCRIPTION` (which means the first run of `devtools::check()` will re-document the package, #963), and creates an empty `NAMESPACE` (which means you'll always need an explicit `@export` if you want to export functions, #927). It also turns markdown processing on by default (#911). * `use_rstudio()` now sets the `LineEndingConversion` to `Posix` so that packages created using usethis always use LF line endings, regardless of who contributes to them (#1002). * In the `usethis.description` option, you can now set `Authors@R = person()` directly, without having to wrap in additional layer of quotes. If setting this in your `.Rprofile`, you'll need to use `utils::person()` since the utils package isn't loaded until after your profile is executed. ## PR helpers * A new article [Pull request helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html) demonstrates how to use the `pr_*()` functions (@mine-cetinkaya-rundel, #802). * `pr_finish()` checks that you don't have any local changes (#805), and can optionally finish any PR, not just the current (#1040). * `pr_pause()` and `pr_fetch()` now automatically pull to get latest changes (#959, #960) and refresh RStudio's git pane (#706). * `pr_push()` now works for a repository with no open pull requests (@maurolepore, #990). * `pr_pull()` gives more information about which files have merge conflicts and automatically opens conflicted files for editing (#1056). ## Other new features * New `rename_files()` makes it easy to rename paired `R/` and `test/` files (#784). * New `ui_silence()` makes it easier to selectively silence some UI output. * New `use_agpl3_license()` (@pachamaltese, #870). * New `use_data_table()` to set up a package for Import-ing `data.table` (@michaelchirico, #897). * `use_latest_dependencies()` replaces `use_tidy_version()` as the new name better reflect its usage (#771). * New `use_lifecycle()` helper to import the lifecycle badges for functions and arguments in your package. Learn more at . * `use_release_issue()` will include additional bullets if your package includes `release_bullets()` function which returns a character vector (and the package has been loaded with `load_all()`) (#941). ## Minor improvements and bug fixes * When writing files, usethis now respects line endings. Default line endings are taken from the `.Rproj` file (if available), otherwise the `DESCRIPTION`, otherwise the first file found in `R/`, then all else failing to your platform default (#767). It should do a better job of preserving UTF-8 files on windows (#969). * `browse_github()` now always goes to the canonical GitHub site: `https://github.com/user/repo`. 
This is slightly worse than the current behaviour but makes the function more consistent across packages, and considerably simplifies the implementation. * `browse_circle()` opens the project dashboard on Circle CI. * `create_download_url()` is a new helper for making "ZIP file download" URLs suitable for use with `use_course()` and `use_zip()`, starting with the URLs that mere mortals can usually get their hands on in a browser (@fmichonneau, #406). * `create_package()` no longer fails partway through if you have a malformed `usethis.description` option (#961). * `create_package()` will now create a package in a symlink to a directory (#794). * `create_package()` and `use_description()` gain a `check_name` argument to control whether to check for package names invalid for CRAN (@noamross, #883). * `edit_file()` and `use_test()` gain an `open` parameter that allows you to control whether or not the function is opened for editing by the user (#817). * `edit_rstudio_snippets()` makes it more clear which snippet types are allowed and that user's snippets mask the built-in snippets (@GegznaV, #885). * `git_sitrep()` now reports project-specific user name and email, if set (#837), and email(s) associated with your GitHub account (@dragosmg, #724). * `ui_yeah()` and `ui_nope()` allow you to override the default "yes" and "no" strings and to opt-out of shuffling (@rundel, #796). * `use_circleci()` uses correct delimiters in template (@jdblischak, #835). * `use_circleci_badge()` is now exported (@pat-s, #920). * `use_code_of_conduct()` now generates an absolute link to code of conduct on pkgdown website or original source to avoid R CMD check issues (#772). * `use_course()` and `use_zip()` are now equipped with some retry capability, to cope with intermittent failure or the need for a longer connect timeout (#988). * `use_data()` automatically bumps R dependency to 2.10 (#962). * `use_data_raw()` template quotes the dataset name correctly (#736, @mitchelloharawild). * `use_description_defaults()` now shows the default fields combined with any options that you have set. * `use_dev_package()` now supports packages installed from any remote type, not just GitHub (@antoine-sachet, #1071). * `use_git()` will now create initial commit if needed (#852). * `use_github_release()` no longer fails if you have no news bullets (#1048). * `use_github_release()` now tags the latest local commit instead of the latest remote commit on the default branch (@davidchall, #1029). * `use_gpl3_license()` now completes the license by providing additional information in a file named LICENSE, just like `use_mit_license()` and friends (@Cervangirard, #683). * `use_logo()` now generates the correct href if the pkgdown `url` is set (@mitchelloharawild, #986). * `use_make()` gains missing closing parenthesis (@ryapric, #804). * `use_markdown_template()` no longer uses an unexported function in its default arguments (@fmichonneau, #761). * `use_testthat()` and `use_test()` now work in projects, not just packages (#1017). * `use_test()` works on Windows when called without arguments (#901). * `use_tidy_issue_template()` uses current github format (@Maschette, #756). * `use_travis()`, `use_travis_badge()`, and `browse_travis()`, now default to `ext = "com"` since travis-ci.com is now recommended it over travis-ci.org (@riccardoporreca, #1038). * `use_release_issue()` reminds you to re-generate `README.md`, if needed (#767). * `use_r()` and `use_test()` throw a clear error if multiple names are provided (@strboul, #862). 
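  For reference, a quick sketch of the paired helpers, which expect a single name per call (file names are illustrative):

  ```r
  usethis::use_r("summarise")      # creates/opens R/summarise.R
  usethis::use_test("summarise")   # creates/opens tests/testthat/test-summarise.R
  # usethis::use_r(c("a", "b"))    # now errors clearly: one name at a time
  ```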
* `use_rcpp()` and `use_c()` now ensure `src/` contains at least one `.cpp` or `.c` placeholder file, so that the package can be built (@coatless, #720). * `usethis.destdir` is a new option that is consulted when deciding where to put a new folder created by `use_course()` or `create_from_github()` (@malcolmbarrett, #1015). * `use_lifecycle()` no longer adds the lifecycle package to the DESCRIPTION file. With the new roxygen markdown syntax for including badges, lifecycle has become a build-time dependency. ## Dependency changes New Imports: cli, rematch2, rlang. gh minimum version is bumped to v.1.1.0, due to changed behaviour around requests that return nothing. clisymbols is removed from Imports. # usethis 1.5.1 This is a patch release with various small features and bug fixes. ## Using the pipe `%>%` or the tidy eval toolkit in your package * The templates used by `use_pipe()` and `use_tidy_eval()` use a more robust form of cross-reference links, linking to files rather than topics. This should silence some warnings seen on Windows at install time (#730, #731 @jmgirard). * `use_pipe()` gains a logical `export` argument, so it can do the setup necessary to use the pipe operator when it is re-exported (`export = TRUE`, which is the default and preserves the previous behaviour) and when it is not (`export = FALSE`) (#783). ## Git, GitHub, and pull requests * `use_github()` removes newline `\n` characters from the description that can cause the initial push to fail (#493, @muschellij2). * `git_sitrep()` gives better feedback if we can't validate the GitHub PAT (#725, @ijlyttle). * `create_from_github()` sets remote tracking branch of `master` to `upstream/master`, when it creates (and clones) a fork (#792). * `pr_pause()` can switch back to master even if there is no remote tracking branch (#715, @cderv). ## Build tools and continuous integration * `use_tidy_ci()` is updated for R 3.6, meaning that R 3.2 is the oldest version of R supported through proactive testing. * `use_make()` and `use_jenkins()` add a Makefile and Jenkinsfile, respectively (#501, @ryapric). * `use_circleci()` creates a `.circleci/config.yaml` config file for CircleCI (#703, @jdblischak). ## Other * `use_zip()` is a new variant of `use_course()` that downloads and unpacks a ZIP file, with less pedantic behaviour re: the destination directory. Both functions now also work for ZIP files with MIME type `"application/x-zip-compressed"` (#573). * `use_version()` can detect `"(development version)"` in a NEWS header and update it with an actual version (#768, @DavisVaughan). ## Dependency changes R 3.1 is no longer explicitly supported or tested. Our general practice is to support the current release (3.6, at time of writing), devel, and the 4 previous versions of R (3.5, 3.4, 3.3, 3.2). fs minimum version is stated to be v1.3.0. glue minimum version is stated to be v1.3.0. # usethis 1.5.0 ## Git, GitHub (and GitLab) usethis gains several functions to inspect and manipulate the Git situation for the current project = repository. We also provide more control and visibility into git2r's workings, especially around credentials (usethis uses git2r for all Git operations). * `git_sitrep()` lets you know what's up with your Git, git2r and GitHub config (#328). * `git_vaccinate()` vaccinates your global (i.e. user-level) git ignore file. It adds standard entries for R users, such as `.Rhistory` and `.Rdata`. This decreases the chance that you commit and push files containing confidential information (#469). 
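  A hedged sketch of how these two helpers are typically used together:

  ```r
  usethis::git_vaccinate()  # add .Rhistory, .Rdata and similar entries to the user-level git ignore file
  usethis::git_sitrep()     # then review the overall Git/GitHub configuration
  ```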
* `git_remotes()` and `use_git_remote()` are new helpers to inspect or modify Git remote URLs for the repo associated with the active project (#649).

* `git_protocol()` + `use_git_protocol()` and `git_credentials()` + `use_git_credentials()` are new helpers to summon or set Git transport protocol (SSH or HTTPS) or git2r credentials, respectively. These functions are primarily for internal use. Most users can rely on default behaviour. Use these helpers to intervene if git2r isn't discovering the right credentials (#653). usethis honors the `usethis.protocol` option, which allows you to express a general preference for SSH vs. HTTPS.

Other improvements and bug fixes:

* `use_github()` tries harder but also fails earlier, with more informative messages, making it less likely to leave the repo partially configured (#221).

* `use_github()` and `create_from_github()` gain a `protocol` argument (#494, @cderv).

* `create_from_github()` pulls from upstream master in a fork (#695, @ijlyttle).

* `use_release_issue()` creates a GitHub issue containing a release checklist, reflecting the standard practices of the tidyverse team (#338).

* `use_github_release()` creates a draft GitHub release using the entries in `NEWS.md` (#137).

* `use_gitlab_ci()` creates a `gitlab-ci.yaml` config file for GitLab CI (#565, @overmar).

* `use_git_config()` now invisibly returns the previous values of the settings.

* `use_github_labels()` has been rewritten to be more flexible. You can now supply a repo name, and `descriptions`, and you can set colours/descriptions independently of creating labels. You can also `rename` existing labels (#290).

## GitHub pull requests

We've added **experimental** functions to work with GitHub pull requests. They are aimed at both a maintainer (who may make, review, and modify pull requests) and a contributor (who may make or explore pull requests).

* `git_sitrep()` includes a section at the end aimed at describing "pull request readiness". Expect that to develop and expand.

* `pr_init()`, `pr_fetch()`, `pr_push()`, `pr_pull()`, `pr_finish()`, and `pr_view()` constitute the new family of helpers. They are designed to be smart about the significance of remotes with the standard names of `origin` and `upstream` and to facilitate both internal and external pull requests.

## Partial file management

usethis gains tooling to manage part of a file. This is currently used for managing badges in your README and roxygen import tags:

* `use_badge()` and friends now automatically add badges if your README contains a specially formatted badge block (#497):

  ```
  <!-- badges: start -->
  <!-- badges: end -->
  ```

* `use_tibble()` and `use_rcpp()` automatically add roxygen tags to `{package}-package.R` if it contains a specially formatted namespace block (#517):

  ```R
  ## usethis namespace: start
  ## usethis namespace: end
  NULL
  ```

  Unfortunately this means that `use_rcpp()` no longer supports non-roxygen2 workflows, but I suspect the set of people who use usethis and Rcpp but not roxygen2 is very small.

## Extending and wrapping usethis

* New `proj_activate()` lets you activate a project, either opening a new RStudio session (if you use RStudio) or changing the working directory (#511).

* `proj_get()` and `proj_set()` no longer have a `quiet` argument. The user-facing message about setting a project is now under the same control as other messages, i.e. `getOption("usethis.quiet", default = FALSE)` (#441).
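  For example, to run usethis quietly from a script or another package (a sketch built on the option named above):

  ```r
  old <- options(usethis.quiet = TRUE)  # silence usethis messaging
  usethis::proj_set(".")                # set the active project without the usual message
  usethis::proj_get()                   # returns the active project path
  options(old)                          # restore the previous setting
  ```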
* A new set of `ui_*()` functions makes it possible to give your own code the same user interface as usethis (#308). All use the glue and crayon packages to power easy interpolation and formatting. There are four families of functions:

  * block styles: `ui_line()`, `ui_done()`, `ui_todo()`, `ui_oops()`, `ui_info()`.
  * conditions: `ui_stop()`, `ui_warn()`.
  * questions: `ui_yeah()`, `ui_nope()`.
  * inline styles: `ui_field()`, `ui_value()`, `ui_path()`, `ui_code()`.

* `with_project()` and `local_project()` are new withr-style functions to temporarily set an active usethis project. They make usethis functions easier to use in an *ad hoc* fashion or from another package (#441).

## Tidyverse standards

These standards are (aspirationally) used by all tidyverse packages; you are welcome to use them if you find them helpful.

* Call `use_tidy_labels()` to update GitHub labels. Colours are less saturated, docs is now documentation, we use some emoji, and performance is no longer automatically added to all repos (#519). Repo specific issues should be given colour `#eeeeee` and have an emoji.

* Call `use_logo()` to update the package logo to the latest specifications: `man/figure/logo.png` should be 240 x 278, and README should contain ``. This gives a nicer display on retina displays. The logo is also linked to the pkgdown site if available (#536).

* When creating a new package, use `create_tidy_package()` to start with a package following the tidyverse standards (#461).

* `NEWS.md` for the development version should use "(development version)" rather than the specific version (#440).

* pkgdown sites should now be built by travis and deployed automatically to GitHub pages. `use_pkgdown_travis()` will help you set that up.

* When starting the release process, call `use_release_issue()` to create a release checklist issue (#338).

* Prior to CRAN submission call `use_tidy_release_test_env()` to update the test environment section in `cran-comments.md` (#496).

* After acceptance, try `use_github_release()` to automatically create a release. It's created as a draft so you have a chance to look over before publishing.

* `use_vignette()` includes a standard initialisation chunk with `knitr::opts_chunk$set(comment = "#>", collapse = TRUE)` which should be used for all Rmds.

## New functions not already mentioned

* `use_devtools()` (#624), `use_conflicted()` (#362), and `use_reprex()` (#465) help add useful packages to your `.Rprofile`.

* `use_partial_warnings()` helps the user add a standard warning block to `.Rprofile` (#64).

* `edit_r_buildignore()` opens `.Rbuildignore` for manual editing (#462, @bfgray3).

* `use_lgpl_license()` automates set up of the LGPL license (#448, @krlmlr).

* `use_ccby_license()` adds a CCBY 4.0 license (#547, @njtierney).

* `use_rcpp_armadillo()` and `use_rcpp_eigen()` set up a package to use RcppArmadillo or RcppEigen, respectively (#421, @coatless, @duckmayr).

* `use_c("foo")` sets up `src/` and creates `src/foo.c` (#117).

* `use_covr_ignore()` makes it easy to ignore files in test coverage (#434).

* `use_pkgdown_travis()` helps you set up pkgdown for automatic build-and-deploy from Travis-CI to GitHub Pages (#524).

* `use_addin()` does setup for RStudio addins (#353, @haozhu233).

* `use_tutorial()` creates a new interactive R Markdown tutorial, as implemented by the [`learnr` package](https://rstudio.github.io/learnr/index.html) (@angela-li, #645).

* `use_article()` creates articles, vignettes that are automatically added to `.Rbuildignore`.
These appear on pkgdown sites, but are not included with the package itself (#281). * `use_citation()` creates a basic `CITATION` template and puts it in the right place (#100). ## Other minor bug fixes and improvements * `write_union()` appends the novel `lines`, but does not remove duplicates from existing lines (#583, @khailper). * `use_rcpp("foo")` now creates `src/foo.cpp` (#117). * `use_data()` gains a `version` argument and defaults to serialization format version 2 (#675). * `use_data_raw()` accepts a name for the to-be-prepared dataset and opens a templated R script (#646). * `browse_github()` now falls back to CRAN organisation (with a warning) if package doesn't have its own GitHub repo (#186). * `create_*()` restore the active project if they error part way through, and use `proj_activate()` (#453, #511). * `edit_r_profile()` and `edit_r_environ()` now respect environment variables `R_PROFILE_USER` and `R_ENVIRON_USER`, respectively (#480). * `use_description()` once again prints the generated description (#287). * `use_description_field()` is no longer sensitive to whitespace, which allows `use_vignette()` to work even if the `VignetteBuilder` field is spread over multiple lines (#439). * `use_logo()` can override existing logo if user gives permission (#454). It also produces retina appropriate logos by default, and matches the aspect ratio to the specification (#499). * `use_news_md()` will optionally commit. * `use_package()` gains a `min_version` argument to specify a minimum version requirement (#498). Set to `TRUE` to use the currently installed version (#386). This is used by `use_tidy_eval()` in order to require version 0.1.2 or greater of rlang (#484). * `use_pkgdown()` is now configurable with site options (@jayhesselberth, #467), and no longer creates the `docs/` directory (#495). * `use_test()` no longer forces the filename to be lowercase (#613, @stufield). * `use_test()` will not include a `context()` in the generated file if used with testthat 2.1.0 and above (the future release of testthat) (#325). * `use_tidy_description()` sets the `Encoding` field in `DESCRIPTION` (#502, @krlmlr). * `use_tidy_eval()` re-exports `:=` (#595, @jonthegeek). * `use_tidy_versions()` has source argument so that you can choose to use local or CRAN versions (#309). * `use_travis()` gains an `ext` argument, defaulting to `"org"`. Use `ext = "com"` for `https://travis-ci.com` (@cderv, #500). * `use_version()` asks before committing. * `use_vignette()` now has a `title` argument which is used in YAML header (in the two places where it is needed). The vignettes also lose the default author and date fields (@rorynolan, #445), and the R Markdown starter material. They gain a standard setup chunk. * `use_version("dev")` now creates a standard "(development version)" heading in `NEWS.md` (#440). * `use_vignette()` now checks if the vignette name is valid (starts with letter and consists of letters, numbers, hyphen, and underscore) and throws an error if not (@akgold, #555). * `restart_rstudio()` now returns `FALSE` in RStudio if no project is open, fixing an issue that caused errors in helpers that suggest restarting RStudio (@gadenbuie, #571). ## Dependency changes * withr moves from Suggests to Imports. * purrr and yaml are new in Imports. # usethis 1.4.0 ## File system All usethis file system operations now use the [fs](https://fs.r-lib.org) package (#177). 
This should not change how usethis functions, but users may notice these features of fs-mediated paths: - Paths are "tidy", meaning `/` is the path separator and there are never multiple or trailing `/`. - Paths are UTF-8 encoded. - A Windows user's home directory is interpreted as `C:\Users\username` (typical of Unix-oriented tools, like Git and ssh; also matches Python), as opposed to `C:\Users\username\Documents` (R's default on Windows). Read more in [`fs::path_expand()`](https://fs.r-lib.org/reference/path_expand.html). ## Extending or wrapping usethis These changes make it easier for others to extend usethis, i.e. to create workflow packages specific to their organization, or to use usethis in other packages. * `proj_path()` is newly exported. Use it to build paths within the active project. Like `proj_get()` and `proj_set()`, it is not aimed at end users, but rather for use in extension packages. End users should use [rprojroot](https://rprojroot.r-lib.org) or its simpler companion, [here](https://here.r-lib.org), to programmatically detect a project and build paths within it (#415, #425). * `edit_file()`, `write_over()`, and `write_union()` are newly exported helpers. They are mostly for internal use, but can also be useful in packages that extend or customize usethis (#344, #366, #389). * `use_template()` no longer errors when a user chooses not to overwrite an existing file and simply exits with confirmation that the file is unchanged (#348, #350, @boshek). * `getOption("usethis.quiet", default = FALSE)` is consulted when printing user-facing messages. Set this option to `TRUE` to suppress output, e.g., to use usethis functions quietly in another package. For example, use `withr::local_options(list(usethis.quiet = TRUE))` in the calling function (#416, #424). ## New functions * `proj_sitrep()` reports current working directory, the active usethis project, and the active RStudio Project. Call this function if things seem weird and you're not sure what's wrong or how to fix it. Designed for interactive use and debugging, not for programmatic use (#426). * `use_tibble()` does minimum setup necessary for a package that returns or exports a tibble. For example, this guarantees a tibble will print as a tibble (#324 @martinjhnhadley). * `use_logo()` resizes and adds a logo to a package (#358, @jimhester). * `use_spell_check()` adds a whitelist of words and a unit test to spell check package documentation during `R CMD check` (#285 @jeroen). ## Other small changes and bug fixes * usethis has a new logo! (#429) * `use_course()` reports progress during download (#276, #380). * `use_git()` only makes an initial commit of all files if user gives explicit consent (#378). * `create_from_github()`: the `repo` argument is renamed to `repo_spec`, since it takes input of the form "OWNER/REPO" (#376). * `use_depsy_badge()` is deprecated. The Depsy project has officially concluded and is no longer being maintained (#354). * `use_github()` fails earlier, with a more informative message, in the absence of a GitHub personal access token (PAT). Also looks for the PAT more proactively in the usual environment variables (i.e., GITHUB_PAT, GITHUB_TOKEN) (#320, #340, @cderv). * The logic for setting DESCRIPTION fields in `create_package()` and `use_description()` got a Spring Cleaning. Fields directly specified by the user take precedence, then the named list in `getOption("usethis.description")` is consulted, and finally defaults built into usethis. 
`use_description_defaults()` is a new function that reveals fields found in options and built into usethis. Options specific to one DESCRIPTION field, e.g. `devtools.desc.license`, are no longer supported. Instead, use a single named list for all fields, preferably stored in an option named `"usethis.description"` (however,`"devtools.desc"` is still consulted for backwards compatibility). (#159, #233, #367) ## Dependency changes New Imports: fs, glue, utils No longer in Imports: backports, httr, rematch2, rmarkdown (moved to Suggests), styler (moved to Suggests) # usethis 1.3.0 * usethis has a website: (#217). It includes an article with advice on system setup, for usethis and for R development more generally. * `edit_*()` functions now return the target path, invisibly (#255). * `edit_git_ignore(scope = "user")` prefers `~/.gitignore`, but detects an existing `~/.gitignore_global`, if it exists. If a new global gitignore file is created, it is created as `~/.gitignore` and recorded in user's git config as the `core.excludesfile` (#255). * `create_from_github()` gains several arguments and new functionality. The `protocol` argument lets user convey whether remote URLs should be ssh or https. In the case of "fork and clone", the original repo is added as `upstream` remote. It is now possible -- although rarely necessary -- to directly specify the GitHub PAT, credentials (in git2r form), and GitHub host (#214, #214, #253). * `use_github_labels()` can create or update the colour of arbitrary GitHub issue labels, defaulting to a set of labels and colours used by the tidyverse packages, which are now exposed via `tidy_labels()`. That set now includes the labels "good first issue" and "help wanted" (#168, #249). * `appveyor_info()` no longer reverses the repo's URL and image link. Corrects the markdown produced by `use_appveyor_badge()` (#240, @llrs). * `use_cran_badge()` uses an HTTPS URL for the CRAN badge image (#235, @jdblischak). * `create_package()` and `create_project()` return a normalized path, even if target directory does not pre-exist (#227, #228). ## New functions * `use_git_config()` can set user's Git name or email, globally or locally in a project/repo (#267). * `browse_github_pat()` goes to the webpage where a GitHub user can create a personal access token (PAT) for the GitHub API. If the user configures a PAT, they can use functions like `create_from_github()` and `use_github()` to easily create and connect GitHub repos to local projects. (#248, #257, @jeroen, via @jennybc). * `use_version()` increments the version of the active package, including an interactive chooser. `use_dev_version()` is now a special case wrapper around this. (#188, #223, @EmilHvitfeldt). * `use_tidy_github()` creates a standard set of files that make a GitHub repository more navigable for users and contributors: an issue template, contributing guidelines, support documentation, and a code of conduct. All are now placed in a `.github/` subdirectory (#165, @batpigandme). * `use_bioc_badge` creates a Bioconductor badge that links to the build report (#271, @LiNk-NY). * `use_binder_badge()` creates a badge indicating the repository can be launched in an executable environment via [Binder](https://mybinder.org/) (#242, @uribo). # usethis 1.2.0 ## New functions * `use_course()` downloads a folder's worth of materials from a ZIP file, with deliberate choices around the default folder name and location. Developed for use at the start of a workshop. 
Helps participants obtain materials from, e.g., a DropBox folder or GitHub repo (#196). * `use_blank_slate()` provides a way to opt in to an RStudio workflow where the user's workspace is neither saved nor reloaded between R sessions. Automated for `scope = "project"`. Provides UI instructions for `scope = "user"`, for now (#139). * `use_tidy_style()` styles an entire project according to (#72, #197 @lorenzwalthert). * GitHub conventions common to tidyverse packages are enacted by `use_tidy_contributing()`, `use_tidy_issue_template()`, and `use_tidy_support()` (@batpigandme, #143, #166). Other changes * New projects that don't exhibit other obvious criteria for being a "project" will include a sentinel, empty file named `.here`, so they can be recognized as a project. * Project launching and switching works on RStudio server (#115, #129). * `use_template()` is newly exported, so that other packages can provide templating functions using this framework (@ijlyttle #120). * `use_readme_rmd()` and `use_readme_md()` work, in a similar fashion, for projects that are and are not a package (#131, #135). * `use_readme_rmd()` once again creates a pre-commit git hook, to help keep `README.Rmd` and `README.md` in sync (@PeteHaitch #41). * Substantial increase in unit test coverage. # usethis 1.1.0 ## New helpers * `browse_github()`, `browse_github_issues()`, `browse_github_pulls()`, `browse_cran()` and `browse_travis()` open useful websites related to the current project or a named package. (#96, #103). * `create_from_github()` creates a project from an existing GitHub repository, forking if needed (#109). * `use_cc0_license()` applies a CC0 license, particularly appropriate for data packages (#94) * `use_lifecycle_badge()` creates a badge describing current stage in project lifecycle (#48). * `use_pkgdown()` creates the basics needed for a [pkgdown](https://github.com/r-lib/pkgdown) website (#88). * `use_r("foo")` creates and edit `R/foo.R` file. If you have a test file open, `use_r()` will open the corresponding `.R` file (#105). * `use_tidy_versions()` sets minimum version requirement for all dependencies. ## Bug fixes and improvements * `use_dev_version()` now correctly updates the `Version` field in a package description file. (@tjmahr, #104) * `use_revdep()` now also git-ignores the SQLite database (#107). * `use_tidy_eval()` has been tweaked to reflect current guidance (#106) # usethis 1.0.0 This is a new package that extracts out many functions that previously lived in devtools, as well as providing more building blocks so you can create your own helpers. As well as the many new helpers listed below, there are three main improvements to the package: * More support for general R projects, other than packages. * A notion of an "active" project that all commands operate on. * Refined output. usethis is gradually evolving towards supporting more general R "projects", not just packages. This is still a work in progress, so please let me know if you use a function that you think should work with projects but doesn't. You can also try out the new `create_project()` which creates a basic RStudio project. The concept of the working directory and the "base path" have been refined. Rather than using an argument to specify the active project, all `use_` functions now use a global active project setting, as returned by `proj_get()`. This is cached throughout a session, although it will be updated by `create_package()` and `create_project()`. 
You'll now get a clear error if you attempt to `use_something()` outside of a project, and `create_something()` will warn if you're trying to create inside an existing project.

The output from all usethis commands has been reviewed to be informative but not overwhelming. usethis takes advantage of colour (using crayon and RStudio 1.1) to help chunk the output and clearly differentiate what you need to do vs. what has been done for you.

## New functions

* `use_apl2_license()` if you want to use the Apache 2.0 license.

* `use_depsy_badge()` allows including a Depsy badge (@gvegayon, #68).

* `use_dev_package()` works like `use_package()` but also adds the repo to the `Remotes` field (#32).

* `use_github_labels()` will automatically set up a standard set of labels, optionally removing the default labels (#1).

* `use_pipe()` creates a template to use magrittr's `%>%` in your package (#15).

* `use_tidy_ci()` which sets up travis and codecov using the tidyverse conventions (#14).

* `use_tidy_description()` puts description fields in a standard order and alphabetises dependencies.

* `use_tidy_eval()` imports and re-exports the recommended set of tidy eval helpers if your package uses tidy eval (#46).

* `use_usethis()` opens your `.Rprofile` and gives you the code to copy and paste in.

## New edit functions

A new class of functions make it easy to edit common config files:

* `edit_r_profile_user()` opens `.Rprofile`
* `edit_r_environ_user()` opens `.Renviron`
* `edit_r_makevars_user()` opens `.R/Makevars`
* `edit_git_config_user()` opens `.gitconfig`
* `edit_git_ignore_user()` opens `.gitignore`
* `edit_rstudio_snippets(type)` opens `~/R/snippets/{type}.snippets`

## Updates

* `use_coverage("codecov")` now sets a default threshold of 1% to try and reduce false positives (#8).

* `use_description()` now sets `ByteCompile: true` so you can benefit from the byte compiler (#29).

* The license functions (`use_mit_license()`, `use_apl2_license()`, and `use_gpl3_license()`) save a copy of the standard license text in `LICENSE.md`, which is then added to `.Rbuildignore`. This allows you to follow standard licensing best practices while adhering to CRAN's requirements (#10).

* `use_package_doc()` uses a more modern roxygen2 template that requires less duplication.

* `use_test()` will use the name of the currently open file in RStudio if you don't supply an explicit name (#89).

* `use_readme_rmd()` now puts images in `man/figures/` and no longer adds to `.Rbuildignore`. This ensures that the rendered `README.md` will also work on CRAN (#16, #19). The first chunk now uses `include = FALSE` and is named setup (#19).

* `use_revdep()` creates structure for use with the revdepcheck package, the preferred way to run revdepchecks (#33).

## Building blocks

* New `use_badge()` for adding any badge to a README. Now only prints a todo message if the badge does not already exist.

* `use_directory()` is now exported (#27).

## Bug fixes and minor improvements

* Functions which require code to be copied now automatically put the code on the clipboard if it is available (#52).

* `create_package()` no longer creates a dependency on the current version of R.

* `use_build_ignore()` now strips trailing `/`.

* `use_git()` will restart RStudio if needed (and possible) (#42).

* `use_github()` now has an organisation parameter so you can create repos in organisations (#4).

* `use_template()` and `use_test()` now convert title to a slug that only contains lowercase letters, numbers, and `-`.
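  For instance, under the slug rule described in this bullet (behaviour as of this release; later versions relaxed the lowercasing, see the 1.6.0 notes above):

  ```r
  usethis::use_test("My Function")  # file is created as tests/testthat/test-my-function.R
  ```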
* `use_vignette()` now adds `*.html` and `*.R` to your `.gitgnore` so you don't accidentally add in compiled vignette products (#35). * `use_travis_badge()` and `use_appveyor_badge()` are now exported functions, so they can be used even if ci was separately set up (#765, @smwindecker). usethis/inst/0000755000176200001440000000000014651000165012711 5ustar liggesusersusethis/inst/templates/0000755000176200001440000000000014721173101014706 5ustar liggesusersusethis/inst/templates/vscode-c_cpp_properties.json0000644000176200001440000000105414651000165022423 0ustar liggesusers{ "configurations": [ { "name": "Mac", "includePath": [ "${workspaceFolder}/**",{{{ linking_to_includes }}} "/Library/Frameworks/R.framework/Resources/include", "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include", "/usr/local/include" ], "macFrameworkPath": [ "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/System/Library/Frameworks" ], "compilerPath": "/usr/bin/clang", "cStandard": "c99", "cppStandard": "c++11" } ], "version": 4 } usethis/inst/templates/codecov.yml0000644000176200001440000000035014651000165017052 0ustar liggesuserscomment: false coverage: status: project: default: target: auto threshold: 1% informational: true patch: default: target: auto threshold: 1% informational: true usethis/inst/templates/pipe.R0000644000176200001440000000055314651000165015772 0ustar liggesusers#' Pipe operator #' #' See \code{magrittr::\link[magrittr:pipe]{\%>\%}} for details. #' #' @name %>% #' @rdname pipe #' @keywords internal #' @export #' @importFrom magrittr %>% #' @usage lhs \%>\% rhs #' @param lhs A value or the magrittr placeholder. #' @param rhs A function call using the magrittr semantics. #' @return The result of calling `rhs(lhs)`. NULL usethis/inst/templates/Jenkinsfile0000644000176200001440000000043414651000165017074 0ustar liggesuserspipeline { agent any { stages { stage('Build') { steps { make build } } stage('Check') { steps { make check } } stage('Clean') { steps { make clean } } } } } usethis/inst/templates/license-GPL-3.md0000644000176200001440000010413014651000165017432 0ustar liggesusersGNU General Public License ========================== _Version 3, 29 June 2007_ _Copyright © 2007 Free Software Foundation, Inc. <>_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. ## Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. 
Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: **(1)** assert copyright on the software, and **(2)** offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. ## TERMS AND CONDITIONS ### 0. Definitions “This License†refers to version 3 of the GNU General Public License. “Copyright†also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. “The Program†refers to any copyrightable work licensed under this License. Each licensee is addressed as “youâ€. “Licensees†and “recipients†may be individuals or organizations. To “modify†a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version†of the earlier work or a work “based on†the earlier work. A “covered work†means either the unmodified Program or a work based on the Program. To “propagate†a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To “convey†a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. 
An interactive user interface displays “Appropriate Legal Notices†to the extent that it includes a convenient and prominently visible feature that **(1)** displays an appropriate copyright notice, and **(2)** tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. ### 1. Source Code The “source code†for a work means the preferred form of the work for making modifications to it. “Object code†means any non-source form of a work. A “Standard Interface†means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The “System Libraries†of an executable work include anything, other than the work as a whole, that **(a)** is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and **(b)** serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Componentâ€, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The “Corresponding Source†for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. ### 2. Basic Permissions All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. ### 3. Protecting Users' Legal Rights From Anti-Circumvention Law No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. ### 4. Conveying Verbatim Copies You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. ### 5. Conveying Modified Source Versions You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: * **a)** The work must carry prominent notices stating that you modified it, and giving a relevant date. * **b)** The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all noticesâ€. * **c)** You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. * **d)** If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate†if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. ### 6. Conveying Non-Source Forms You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: * **a)** Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. * **b)** Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either **(1)** a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or **(2)** access to copy the Corresponding Source from a network server at no charge. * **c)** Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. * **d)** Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. * **e)** Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A “User Product†is either **(1)** a “consumer productâ€, which means any tangible personal property which is normally used for personal, family, or household purposes, or **(2)** anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. 
For a particular product received by a particular user, “normally used†refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. “Installation Information†for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. ### 7. Additional Terms “Additional permissions†are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: * **a)** Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or * **b)** Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or * **c)** Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or * **d)** Limiting the use for publicity purposes of names of licensors or authors of the material; or * **e)** Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or * **f)** Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered “further restrictions†within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. ### 8. Termination You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated **(a)** provisionally, unless and until the copyright holder explicitly and finally terminates your license, and **(b)** permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. ### 9. Acceptance Not Required for Having Copies You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. ### 10. Automatic Licensing of Downstream Recipients Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An “entity transaction†is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. ### 11. Patents A “contributor†is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor versionâ€. A contributor's “essential patent claims†are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control†includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a “patent license†is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant†such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either **(1)** cause the Corresponding Source to be so available, or **(2)** arrange to deprive yourself of the benefit of the patent license for this particular work, or **(3)** arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying†means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is “discriminatory†if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license **(a)** in connection with copies of the covered work conveyed by you (or copies made from those copies), or **(b)** primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. ### 12. No Surrender of Others' Freedom If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. ### 13. Use with the GNU Affero General Public License Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. ### 14. Revised Versions of this License The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version†applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. ### 15. Disclaimer of Warranty THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS†WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. ### 16. Limitation of Liability IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. ### 17. Interpretation of Sections 15 and 16 If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. _END OF TERMS AND CONDITIONS_ ## How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright†line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. This is free software, and you are welcome to redistribute it under certain conditions; type 'show c' for details. The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about boxâ€. You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer†for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <>. usethis/inst/templates/test-example-2.1.R0000644000176200001440000000010014651000165017727 0ustar liggesuserstest_that("multiplication works", { expect_equal(2 * 2, 4) }) usethis/inst/templates/license-cc0.md0000644000176200001440000001546614651000165017332 0ustar liggesusers## creative commons # CC0 1.0 Universal CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER. ### Statement of Purpose The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. 
These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 1. __Copyright and Related Rights.__ A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; ii. moral rights retained by the original author(s) and/or performer(s); iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; v. rights protecting the extraction, dissemination, use and reuse of data in a Work; vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and vii. other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 2. __Waiver.__ To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 3. __Public License Fallback.__ Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. 
In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 4. __Limitations and Disclaimers.__ a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. c. Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. 
usethis/inst/templates/article.Rmd0000644000176200001440000000025114651000165016774 0ustar liggesusers--- title: "{{{ vignette_title }}}" --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup} library({{Package}}) ``` usethis/inst/templates/vscode-launch.json0000644000176200001440000000115614651000165020340 0ustar liggesusers{ "version": "0.2.0", "configurations": [ { "name": "(lldb) Launch R", "type": "lldb", "request": "launch", "program": "/Library/Frameworks/R.framework/Resources/bin/exec/R", "args": [ "--vanilla", "-e", "source('debug/debug.R')" ], "env": { "R_HOME" : "/Library/Frameworks/R.framework/Resources" }, "terminal": "console", "stopOnEntry": false }, { "name": "(lldb) Attach to R", "type": "lldb", "request": "attach", "pid": "${command:pickMyProcess}", "stopOnEntry": false } ] } usethis/inst/templates/vignette.qmd0000644000176200001440000000042014721145632017242 0ustar liggesusers--- title: "{{{ vignette_title }}}" vignette: > %\VignetteIndexEntry{{{ braced_vignette_title }}} %\VignetteEngine{quarto::html} %\VignetteEncoding{UTF-8} knitr: opts_chunk: collapse: true comment: '#>' --- ```{r} #| label: setup library({{Package}}) ``` usethis/inst/templates/vscode-debug.R0000644000176200001440000000006214651000165017377 0ustar liggesusersdevtools::clean_dll() devtools::load_all() 1 + 1 usethis/inst/templates/tutorial-template.Rmd0000644000176200001440000000352014651000165021027 0ustar liggesusers--- title: "{{{ tutorial_title }}}" output: learnr::tutorial runtime: shiny_prerendered --- ```{r setup, include=FALSE} library(learnr) knitr::opts_chunk$set(echo = FALSE) ``` ## Topic 1 ### Exercise *Here's a simple exercise with an empty code chunk provided for entering the answer.* Write the R code required to add two plus two: ```{r two-plus-two, exercise=TRUE} ``` ### Exercise with Code *Here's an exercise with some prepopulated code as well as `exercise.lines = 5` to provide a bit more initial room to work.* Now write a function that adds any two numbers and then call it: ```{r add-function, exercise=TRUE, exercise.lines = 5} add <- function() { } ``` ## Topic 2 ### Exercise with Hint *Here's an exercise where the chunk is pre-evaluated via the `exercise.eval` option (so the user can see the default output we'd like them to customize). We also add a "hint" to the correct solution via the chunk immediate below labeled `print-limit-hint`.* Modify the following code to limit the number of rows printed to 5: ```{r print-limit, exercise=TRUE, exercise.eval=TRUE} mtcars ``` ```{r print-limit-hint} head(mtcars) ``` ### Quiz *You can include any number of single or multiple choice questions as a quiz. 
Use the `question` function to define a question and the `quiz` function for grouping multiple questions together.* Some questions to verify that you understand the purposes of various base and recommended R packages: ```{r quiz} quiz( question("Which package contains functions for installing other R packages?", answer("base"), answer("tools"), answer("utils", correct = TRUE), answer("codetools") ), question("Which of the R packages listed below are used to create plots?", answer("lattice", correct = TRUE), answer("tools"), answer("stats"), answer("grid", correct = TRUE) ) ) ``` usethis/inst/templates/license-LGPL-2.1.md0000644000176200001440000006331214651000165017712 0ustar liggesusersGNU Lesser General Public License ================================= _Version 2.1, February 1999_ _Copyright © 1991, 1999 Free Software Foundation, Inc._ _51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. _This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1._ ### Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: **(1)** we copyright the library, and **(2)** we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. 
Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the “Lesser†General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a “work based on the library†and a “work that uses the libraryâ€. The former contains code derived from the library, whereas the latter must be combined with the library in order to run. 
### TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION **0.** This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called “this Licenseâ€). Each licensee is addressed as “youâ€. A “library†means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The “Libraryâ€, below, refers to any such software library or work which has been distributed under these terms. A “work based on the Library†means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term “modificationâ€.) “Source code†for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. **1.** You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. **2.** You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: * **a)** The modified work must itself be a software library. * **b)** You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. * **c)** You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. * **d)** If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. 
Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. **3.** You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. **4.** You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. **5.** A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a “work that uses the Libraryâ€. Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a “work that uses the Library†with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a “work that uses the libraryâ€. The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. 
When a “work that uses the Library” uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. **6.** As an exception to the Sections above, you may also combine or link a “work that uses the Library” with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: * **a)** Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable “work that uses the Library”, as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) * **b)** Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. * **c)** Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. * **d)** If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. * **e)** Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
For an executable, the required form of the “work that uses the Library” must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. **7.** You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: * **a)** Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. * **b)** Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. **8.** You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. **9.** You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. **10.** Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. **11.** If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. 
For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. **12.** If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. **13.** The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and “any later version”, you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. **14.** If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. ### NO WARRANTY **15.** BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
**16.** IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. _END OF TERMS AND CONDITIONS_ ### How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a “copyright disclaimer” for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! usethis/inst/templates/code-cpp11.cpp0000644000176200001440000000011714651000165017246 0ustar liggesusers#include <cpp11.hpp> using namespace cpp11; [[cpp11::register]] void fun() {} usethis/inst/templates/vignette.Rmd0000644000176200001440000000051414651000165017200 0ustar liggesusers--- title: "{{{ vignette_title }}}" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{{{ braced_vignette_title }}} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup} library({{Package}}) ``` usethis/inst/templates/rmarkdown-template.Rmd0000644000176200001440000000120614651000165021167 0ustar liggesusers--- title: "Template Title" author: "Your Name" date: "The Date" output: output_format --- ```{r setup, include=FALSE} knitr::opts_chunk$set(echo = TRUE) ``` ## Adding an RMarkdown Template This file is what a user will see when they select your template. Make sure that you update the fields in the yaml header. 
In particular you will want to update the `output` field to whatever format your template requires. This is a good place to demonstrate special features that your template provides. Ideally it should knit out-of-the-box, or at least contain clear instructions as to what needs changing. Finally, be sure to remove this message! usethis/inst/templates/tidy-contributing.md0000644000176200001440000000573314651000165020717 0ustar liggesusers# Contributing to {{{ Package }}} This outlines how to propose a change to {{{ Package }}}. For a detailed discussion on contributing to this and other tidyverse packages, please see the [development contributing guide](https://rstd.io/tidy-contrib) and our [code review principles](https://code-review.tidyverse.org/). ## Fixing typos You can fix typos, spelling mistakes, or grammatical errors in the documentation directly using the GitHub web interface, as long as the changes are made in the _source_ file. This generally means you'll need to edit [roxygen2 comments](https://roxygen2.r-lib.org/articles/roxygen2.html) in an `.R`, not a `.Rd` file. You can find the `.R` file that generates the `.Rd` by reading the comment in the first line. ## Bigger changes If you want to make a bigger change, it's a good idea to first file an issue and make sure someone from the team agrees that it’s needed. If you’ve found a bug, please file an issue that illustrates the bug with a minimal [reprex](https://www.tidyverse.org/help/#reprex) (this will also help you write a unit test, if needed). See our guide on [how to create a great issue](https://code-review.tidyverse.org/issues/) for more advice. ### Pull request process * Fork the package and clone onto your computer. If you haven't done this before, we recommend using `usethis::create_from_github("{{github_spec}}", fork = TRUE)`. * Install all development dependencies with `devtools::install_dev_deps()`, and then make sure the package passes R CMD check by running `devtools::check()`. If R CMD check doesn't pass cleanly, it's a good idea to ask for help before continuing. * Create a Git branch for your pull request (PR). We recommend using `usethis::pr_init("brief-description-of-change")`. * Make your changes, commit to git, and then create a PR by running `usethis::pr_push()`, and following the prompts in your browser. The title of your PR should briefly describe the change. The body of your PR should contain `Fixes #issue-number`. * For user-facing changes, add a bullet to the top of `NEWS.md` (i.e. just below the first header). Follow the style described in . ### Code style * New code should follow the tidyverse [style guide](https://style.tidyverse.org). You can use the [styler](https://CRAN.R-project.org/package=styler) package to apply these styles, but please don't restyle code that has nothing to do with your PR. * We use [roxygen2](https://cran.r-project.org/package=roxygen2), with [Markdown syntax](https://cran.r-project.org/web/packages/roxygen2/vignettes/rd-formatting.html), for documentation. * We use [testthat](https://cran.r-project.org/package=testthat) for unit tests. Contributions with test cases included are easier to accept. ## Code of Conduct Please note that the {{{ Package }}} project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By contributing to this project you agree to abide by its terms. 
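For orientation, the pull-request workflow described in the contributing template above can be sketched as a short R session. This is a non-authoritative sketch: `OWNER/REPO` and the branch name are placeholders standing in for the template's `{{github_spec}}` value, not names taken from this document.

```r
# Sketch of the PR workflow recommended above (placeholder repo and branch names).
library(usethis)

create_from_github("OWNER/REPO", fork = TRUE)  # fork the package and clone your fork
devtools::install_dev_deps()                   # install all development dependencies
devtools::check()                              # confirm R CMD check passes before editing

pr_init("brief-description-of-change")         # create a git branch for the PR
# ... make changes, add tests, update NEWS.md, and commit with git ...
pr_push()                                      # push the branch and open the PR in your browser
```

After the PR is merged, `usethis::pr_finish()` can be used to switch back to the default branch and delete the local PR branch.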
usethis/inst/templates/license-AGPL-3.md0000644000176200001440000010277714651000165017552 0ustar liggesusersGNU Affero General Public License ================================= _Version 3, 19 November 2007_ _Copyright (C) 2007 Free Software Foundation, Inc. <>_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. ## Preamble The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. The precise terms and conditions for copying, distribution and modification follow. ## TERMS AND CONDITIONS ### 0. Definitions. "This License" refers to version 3 of the GNU Affero General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. 
The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. ### 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. ### 2. Basic Permissions. 
All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. ### 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. ### 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. ### 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: - a) The work must carry prominent notices stating that you modified it, and giving a relevant date. - b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". - c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. 
This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. - d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. ### 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: - a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. - b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. - c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. - d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. - e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. ### 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: - a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or - b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or - c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or - d) Limiting the use for publicity purposes of names of licensors or authors of the material; or - e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or - f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. ### 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. 
Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. ### 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. ### 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. ### 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. 
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. ### 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. 
For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. ### 13. Remote Network Interaction; Use with the GNU General Public License. Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. ### 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. ### 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. ### 16. Limitation of Liability. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. ### 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS ## How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see . usethis/inst/templates/Makefile0000644000176200001440000000144014721173103016347 0ustar liggesusers# h/t to @jimhester and @yihui for this parse block: # https://github.com/yihui/knitr/blob/dc5ead7bcfc0ebd2789fe99c527c7d91afb3de4a/Makefile#L1-L4 # Note the portability change as suggested in the manual: # https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Writing-portable-packages PKGNAME = `sed -n "s/Package: *\([^ ]*\)/\1/p" DESCRIPTION` PKGVERS = `sed -n "s/Version: *\([^ ]*\)/\1/p" DESCRIPTION` all: check build: install_deps R CMD build . 
check: build R CMD check --no-manual $(PKGNAME)_$(PKGVERS).tar.gz install_deps: Rscript \ -e 'if (!requireNamespace("remotes")) install.packages("remotes")' \ -e 'remotes::install_deps(dependencies = TRUE)' install: build R CMD INSTALL $(PKGNAME)_$(PKGVERS).tar.gz clean: @rm -rf $(PKGNAME)_$(PKGVERS).tar.gz $(PKGNAME).Rcheck usethis/inst/templates/lifecycle-stable.svg0000644000176200001440000000247214651000165020644 0ustar liggesusers lifecycle: stable lifecycle stable usethis/inst/templates/packagename-package.R0000644000176200001440000000013514651000165020656 0ustar liggesusers#' @keywords internal "_PACKAGE" ## usethis namespace: start ## usethis namespace: end NULL usethis/inst/templates/code.cpp0000644000176200001440000000005014651000165016320 0ustar liggesusers#include <Rcpp.h> using namespace Rcpp; usethis/inst/templates/circleci-config.yml0000644000176200001440000000215714651000165020457 0ustar liggesusers{{=<% %>=}} version: 2 jobs: build: docker: - image: <% image %> environment: R_LIBS: ~/R/Library steps: - restore_cache: keys: - r-pkg-cache-{{ arch }}-{{ .Branch }} - r-pkg-cache-{{ arch }}- - checkout - run: name: Install package dependencies command: | mkdir -p ~/R/Library Rscript -e 'install.packages("remotes")' Rscript -e 'remotes::install_deps(dependencies = TRUE)' - run: name: Session information and installed package versions command: | Rscript -e 'sessionInfo()' Rscript -e 'installed.packages()[, c("Package", "Version")]' Rscript -e 'rmarkdown::pandoc_version()' - run: name: Build package command: R CMD build . - run: name: Check package command: R CMD check --as-cran --no-manual *tar.gz - store_artifacts: path: <% package %>.Rcheck/ - save_cache: key: r-pkg-cache-{{ arch }}-{{ .Branch }} paths: - "~/R/Library" usethis/inst/templates/lifecycle-experimental.svg0000644000176200001440000000245014651000165022063 0ustar liggesusers lifecycle: experimental lifecycle experimental usethis/inst/templates/license-LGPL-3.md0000644000176200001440000001661014651000165017553 0ustar liggesusersGNU Lesser General Public License ================================= _Version 3, 29 June 2007_ _Copyright © 2007 Free Software Foundation, Inc. <>_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. ### 0. Additional Definitions As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License. “The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”. The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. 
The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. ### 1. Exception to Section 3 of the GNU GPL You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. ### 2. Conveying Modified Versions If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: * **a)** under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or * **b)** under the GNU GPL, with none of the additional permissions of this License applicable to that copy. ### 3. Object Code Incorporating Material from Library Header Files The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: * **a)** Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. * **b)** Accompany the object code with a copy of the GNU GPL and this license document. ### 4. Combined Works You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: * **a)** Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. * **b)** Accompany the Combined Work with a copy of the GNU GPL and this license document. * **c)** For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. * **d)** Do one of the following: - **0)** Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. - **1)** Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that **(a)** uses at run time a copy of the Library already present on the user's computer system, and **(b)** will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. 
* **e)** Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option **4d0**, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option **4d1**, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) ### 5. Combined Libraries You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: * **a)** Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. * **b)** Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. ### 6. Revised Versions of the GNU Lesser General Public License The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version†applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. 
usethis/inst/templates/lifecycle-deprecated.svg0000644000176200001440000000244014651000165021465 0ustar liggesusers lifecycle: deprecated lifecycle deprecated usethis/inst/templates/readme-rmd-pre-commit.sh0000644000176200001440000000064714651000165021341 0ustar liggesusers#!/bin/bash README=($(git diff --cached --name-only | grep -Ei '^README\.[R]?md$')) MSG="use 'git commit --no-verify' to override this check" if [[ ${#README[@]} == 0 ]]; then exit 0 fi if [[ README.Rmd -nt README.md ]]; then echo -e "README.md is out of date; please re-knit README.Rmd\n$MSG" exit 1 elif [[ ${#README[@]} -lt 2 ]]; then echo -e "README.Rmd and README.md should be both staged\n$MSG" exit 1 fi usethis/inst/templates/license-mit.md0000644000176200001440000000207714651000165017450 0ustar liggesusers# MIT License Copyright (c) {{{year}}} {{{copyright_holder}}} Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. usethis/inst/templates/junit-testthat.R0000644000176200001440000000040114651000165020014 0ustar liggesuserslibrary(testthat) library({{{ name }}}) if (requireNamespace("xml2")) { test_check("{{{ name }}}", reporter = MultiReporter$new(reporters = list(JunitReporter$new(file = "test-results.xml"), CheckReporter$new()))) } else { test_check("{{{ name }}}") } usethis/inst/templates/lifecycle-superseded.svg0000644000176200001440000000244014651000165021530 0ustar liggesusers lifecycle: superseded lifecycle superseded usethis/inst/templates/rmarkdown-template.yml0000644000176200001440000000016314651000165021247 0ustar liggesusersname: {{{ template_name }}} description: > {{{ template_description }}} create_dir: {{{ template_create_dir }}} usethis/inst/templates/addins.dcf0000644000176200001440000000013614651000165016627 0ustar liggesusersName: New Addin Name Description: New Addin Description Binding: {{addin}} Interactive: false usethis/inst/templates/tidy-support.md0000644000176200001440000000467714651000165017732 0ustar liggesusers# Getting help with {{{Package}}} Thanks for using {{{Package}}}! Before filing an issue, there are a few places to explore and pieces to put together to make the process as smooth as possible. ## Make a reprex Start by making a minimal **repr**oducible **ex**ample using the [reprex](https://reprex.tidyverse.org/) package. If you haven't heard of or used reprex before, you're in for a treat! Seriously, reprex will make all of your R-question-asking endeavors easier (which is a pretty incredible ROI for the five to ten minutes it'll take you to learn what it's all about). 
For additional reprex pointers, check out the [Get help!](https://www.tidyverse.org/help/) section of the tidyverse site. ## Where to ask? Armed with your reprex, the next step is to figure out [where to ask](https://www.tidyverse.org/help/#where-to-ask). * If it's a question: start with [community.rstudio.com](https://community.rstudio.com/), and/or StackOverflow. There are more people there to answer questions. * If it's a bug: you're in the right place, [file an issue](https://github.com/{{github_spec}}/issues/new). * If you're not sure: let the community help you figure it out! If your problem _is_ a bug or a feature request, you can easily return here and report it. Before opening a new issue, be sure to [search issues and pull requests](https://github.com/{{github_spec}}/issues) to make sure the bug hasn't been reported and/or already fixed in the development version. By default, the search will be pre-populated with `is:issue is:open`. You can [edit the qualifiers](https://help.github.com/articles/searching-issues-and-pull-requests/) (e.g. `is:pr`, `is:closed`) as needed. For example, you'd simply remove `is:open` to search _all_ issues in the repo, open or closed. ## What happens next? To be as efficient as possible, development of tidyverse packages tends to be very bursty, so you shouldn't worry if you don't get an immediate response. Typically we don't look at a repo until a sufficient quantity of issues accumulates, then there’s a burst of intense activity as we focus our efforts. That makes development more efficient because it avoids expensive context switching between problems, at the cost of taking longer to get back to you. This process makes a good reprex particularly important because it might be multiple months between your initial report and when we start working on it. If we can’t reproduce the bug, we can’t fix it! usethis/inst/templates/gitlab-ci.yml0000644000176200001440000000176314651000165017274 0ustar liggesusersimage: rocker/tidyverse stages: - build - test - deploy building: stage: build script: - R -e "remotes::install_deps(dependencies = TRUE)" - R -e 'devtools::check()' # To have the coverage percentage appear as a gitlab badge follow these # instructions: # https://docs.gitlab.com/ee/user/project/pipelines/settings.html#test-coverage-parsing # The coverage parsing string is # Coverage: \d+\.\d+ testing: stage: test allow_failure: true when: on_success only: - master script: - Rscript -e 'install.packages("DT")' - Rscript -e 'covr::gitlab(quiet = FALSE)' artifacts: paths: - public # To produce a code coverage report as a GitLab page see # https://about.gitlab.com/2016/11/03/publish-code-coverage-report-with-gitlab-pages/ pages: stage: deploy dependencies: - testing script: - ls artifacts: paths: - public expire_in: 30 days only: - master usethis/inst/templates/citation-template.R0000644000176200001440000000023214651000165020452 0ustar liggesusersbibentry( bibtype = "Article", title = , author = , journal = , year = , volume = , number = , pages = , doi = ) usethis/inst/templates/cran-comments.md0000644000176200001440000000012114651000165017771 0ustar liggesusers## R CMD check results 0 errors | 0 warnings | 1 note * This is a new release. 
usethis/inst/templates/year-copyright.txt0000644000176200001440000000007214651000165020415 0ustar liggesusersYEAR: {{{year}}} COPYRIGHT HOLDER: {{{copyright_holder}}} usethis/inst/templates/license-ccby-4.md0000644000176200001440000004433414651000165017742 0ustar liggesusersAttribution 4.0 International ======================================================================= Creative Commons Corporation ("Creative Commons") is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an "as-is" basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible. Using Creative Commons Public Licenses Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses. Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC- licensed material, or material used under an exception or limitation to copyright. More considerations for licensors: wiki.creativecommons.org/Considerations_for_licensors Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor's permission is not necessary for any reason--for example, because of any applicable exception or limitation to copyright--then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public: wiki.creativecommons.org/Considerations_for_licensees ======================================================================= Creative Commons Attribution 4.0 International Public License By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution 4.0 International Public License ("Public License"). 
To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. Section 1 -- Definitions. a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. c. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. d. Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. e. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. f. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. g. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. h. Licensor means the individual(s) or entity(ies) granting rights under this Public License. i. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. j. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. k. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. Section 2 -- Scope. a. License grant. 1. 
Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: a. reproduce and Share the Licensed Material, in whole or in part; and b. produce, reproduce, and Share Adapted Material. 2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. 3. Term. The term of this Public License is specified in Section 6(a). 4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a) (4) never produces Adapted Material. 5. Downstream recipients. a. Offer from the Licensor -- Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. b. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. 6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). b. Other rights. 1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. 2. Patent and trademark rights are not licensed under this Public License. 3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties. Section 3 -- License Conditions. Your exercise of the Licensed Rights is expressly made subject to the following conditions. a. Attribution. 1. If You Share the Licensed Material (including in modified form), You must: a. retain the following if it is supplied by the Licensor with the Licensed Material: i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); ii. a copyright notice; iii. a notice that refers to this Public License; iv. 
a notice that refers to the disclaimer of warranties; v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable; b. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and c. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. 2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. 3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. 4. If You Share Adapted Material You produce, the Adapter's License You apply must not prevent recipients of the Adapted Material from complying with this Public License. Section 4 -- Sui Generis Database Rights. Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; b. if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. Section 5 -- Disclaimer of Warranties and Limitation of Liability. a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. Section 6 -- Term and Termination. a. This Public License applies for the term of the Copyright and Similar Rights licensed here. 
However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: 1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or 2. upon express reinstatement by the Licensor. For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. c. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. d. Sections 1, 5, 6, 7, and 8 survive termination of this Public License. Section 7 -- Other Terms and Conditions. a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. Section 8 -- Interpretation. a. For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. ======================================================================= Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the “Licensor.†The text of the Creative Commons public licenses is dedicated to the public domain under the CC0 Public Domain Dedication. Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark "Creative Commons" or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses. Creative Commons may be contacted at creativecommons.org. 
usethis/inst/templates/packagename-data-prep.R0000644000176200001440000000014314651000165021137 0ustar liggesusers## code to prepare `{{{name}}}` dataset goes here usethis::use_data({{{name}}}, overwrite = TRUE) usethis/inst/templates/NEWS.md0000644000176200001440000000007314651000165016005 0ustar liggesusers# {{{ Package }}} {{{ Version }}} * {{{ InitialBullet }}} usethis/inst/templates/CODE_OF_CONDUCT.md0000644000176200001440000001216514651000165017513 0ustar liggesusers# Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at {{{ contact }}}. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. 
Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.1, available at . Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder][https://github.com/mozilla/inclusion]. For answers to common questions about this code of conduct, see the FAQ at . Translations are available at . [homepage]: https://www.contributor-covenant.org usethis/inst/templates/template.Rproj0000644000176200001440000000062214651000165017540 0ustar liggesusersVersion: 1.0 RestoreWorkspace: No SaveWorkspace: No AlwaysSaveHistory: Default EnableCodeIndexing: Yes Encoding: UTF-8 {{#reformat}} AutoAppendNewline: Yes StripTrailingWhitespace: Yes LineEndingConversion: {{line_ending}} {{/reformat}} {{#is_pkg}} BuildType: Package PackageUseDevtools: Yes PackageInstallArgs: --no-multiarch --with-keep.source PackageRoxygenize: rd,collate,namespace {{/is_pkg}} usethis/inst/templates/testthat.R0000644000176200001440000000062414651000165016674 0ustar liggesusers# This file is part of the standard setup for testthat. # It is recommended that you do not modify it. # # Where should you do additional test configuration? # Learn more about the roles of various files in: # * https://r-pkgs.org/testing-design.html#sec-tests-files-overview # * https://testthat.r-lib.org/articles/special-files.html library(testthat) library({{{ name }}}) test_check("{{{ name }}}") usethis/inst/templates/package-README0000644000176200001440000000262614717524721017202 0ustar liggesusers{{#Rmd}} --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>", fig.path = "man/figures/README-", out.width = "100%" ) ``` {{/Rmd}} # {{{ Package }}} The goal of {{{ Package }}} is to ... 
## Installation {{#on_github}} You can install the development version of {{{ Package }}} from [GitHub](https://github.com/) with: ``` r # install.packages("pak") pak::pak("{{{ github_spec }}}") ``` {{/on_github}} {{^on_github}} You can install the development version of {{{ Package }}} like so: ``` r # FILL THIS IN! HOW CAN PEOPLE INSTALL YOUR DEV PACKAGE? ``` {{/on_github}} ## Example This is a basic example which shows you how to solve a common problem: {{#Rmd}} ```{r example} {{/Rmd}} {{^Rmd}}``` r {{/Rmd}} library({{Package}}) ## basic example code ``` {{#Rmd}} What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub and CRAN. {{/Rmd}} usethis/inst/templates/license-apache-2.md0000644000176200001440000002436014651000165020236 0ustar liggesusersApache License ============== _Version 2.0, January 2004_ _<>_ ### Terms and Conditions for use, reproduction, and distribution #### 1. Definitions “License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. “Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. “Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means **(i)** the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the outstanding shares, or **(iii)** beneficial ownership of such entity. “You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. “Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. “Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. “Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). “Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
“Contribution†shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted†means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.†“Contributor†shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. #### 2. Grant of Copyright License Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. #### 3. Grant of Patent License Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. #### 4. 
Redistribution You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: * **(a)** You must give any other recipients of the Work or Derivative Works a copy of this License; and * **(b)** You must cause any modified files to carry prominent notices stating that You changed the files; and * **(c)** You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and * **(d)** If the Work includes a “NOTICE†text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. #### 5. Submission of Contributions Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. #### 6. Trademarks This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. #### 7. Disclaimer of Warranty Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS†BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. #### 8. 
Limitation of Liability In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. #### 9. Accepting Warranty or Additional Liability While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. _END OF TERMS AND CONDITIONS_ ### APPENDIX: How to apply the Apache License to your work To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets `[]` replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same “printed page†as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. usethis/inst/templates/project-README0000644000176200001440000000135214651000165017234 0ustar liggesusers{{#Rmd}} --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` {{/Rmd}} # {{{ Project }}} The goal of {{{ Project }}} is to ... {{#Rmd}} What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub. {{/Rmd}} usethis/inst/templates/tidy-issue.md0000644000176200001440000000123514651000165017331 0ustar liggesusers--- name: Bug report or feature request about: Describe a bug you've seen or make a case for a new feature --- Please briefly describe your problem and what output you expect. If you have a question, please don't use this form. Instead, ask on or . 
Please include a minimal reproducible example (AKA a reprex). If you've never heard of a [reprex](http://reprex.tidyverse.org/) before, start by reading . For more advice on how to write a great issue, see . Brief description of the problem ```r # insert reprex here ``` usethis/inst/templates/article.qmd0000644000176200001440000000022314721145632017041 0ustar liggesusers--- title: "{{{ vignette_title }}}" knitr: opts_chunk: collapse: true comment: '#>' --- ```{r} #| label: setup library({{Package}}) ``` usethis/inst/templates/license-GPL-2.md0000644000176200001440000004302514651000165017436 0ustar liggesusersGNU General Public License ========================== _Version 2, June 1991_ _Copyright © 1989, 1991 Free Software Foundation, Inc.,_ _51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. ### Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: **(1)** copyright the software, and **(2)** offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. 
### TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION **0.** This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The “Programâ€, below, refers to any such program or work, and a “work based on the Program†means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term “modificationâ€.) Each licensee is addressed as “youâ€. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. **1.** You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. **2.** You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: * **a)** You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. * **b)** You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. * **c)** If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. 
Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. **3.** You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: * **a)** Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, * **b)** Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, * **c)** Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. **4.** You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. **5.** You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 
**6.** Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. **7.** If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. **8.** If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. **9.** The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and “any later versionâ€, you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. **10.** If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. 
For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. ### NO WARRANTY **11.** BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. **12.** IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS ### How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w` and `show c`; they could even be mouse-clicks or menu items--whatever suits your program. 
You should also get your employer (if you work as a programmer) or your school, if any, to sign a “copyright disclaimer†for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. usethis/inst/templates/license-proprietary.txt0000644000176200001440000000010214651000165021441 0ustar liggesusersCopyright {{{year}}} {{{copyright_holder}}}. All rights reserved. usethis/inst/templates/code.c0000644000176200001440000000007214651000165015764 0ustar liggesusers#define R_NO_REMAP #include #include usethis/inst/WORDLIST0000644000176200001440000000311614717720323014114 0ustar liggesusersAGPL Addin AppVeyor Appveyor BioC BioConductor Bitwarden BugReports CCBY CLA CMD CNAME CRAN's CircleCI CoC Codecov Colours Csárdi Depsy DockerHub DropBox GHE Gert Gert's Git's GitLab Gitignores Gábor HTTPS Homebrew Initialise JSON Jenkinsfile Jupyter Keychain LF LGL Lifecycle LinkingTo METACRAN Makefile ORCID PATs PBC PRs README REPO RMarkdown RProfile RStudio RStudio's Rcpp RcppArmadillo RcppEigen Repo Rmd Rmds Roxygen Rtools SHA Sitrep TLS TeXLive Templated Tidyverse UI Ushey's Xcode YAML addin addins alphabetises api applypatch artefacts aspirationally auth authenticator backports badged behaviour bzip ci cli cli's clipr clisymbols codecov colour colours config convergently covr cpp customised dev devtools discoverable else's emacs eval favour fiascos filenaming foofy formidabel frontmatter fs funder gc gert gh gh's gitcreds github gitignore googledrive grey gzip href https httr ing initialisation initialises initialising inlined io jsonlite labelled labelling learnr libgit lifecycle ly macbook magrittr magrittr's md mergeable minimise msg namespacing nano noninteractive oldrel organisation organisations ort pak pandoc pkgdown pos pre programmatically purrr r's rOpenSci rappdirs readme rebase reconfigures redirections repo repo's repos reprex reproducibility revdep revdepcheck revdepchecks rladies rlang rmarkdown ropensci roxygen rprojroot rstd shortcode shortlink shortlinks signalling sitrep solarized src styler symlink symlinks templated templating testthat tibble tidymodels tidyverse todo todo's travis triaged uation ui un unpushed useR usethis's ver vm withr xyz xz yaml yyy zzz usethis/README.md0000644000176200001440000001143614721151423013222 0ustar liggesusers # usethis usethis website [![R-CMD-check](https://github.com/r-lib/usethis/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/r-lib/usethis/actions/workflows/R-CMD-check.yaml) [![CRAN status](https://www.r-pkg.org/badges/version/usethis)](https://CRAN.R-project.org/package=usethis) [![Lifecycle: stable](https://img.shields.io/badge/lifecycle-stable-brightgreen.svg)](https://lifecycle.r-lib.org/articles/stages.html#stable) [![Codecov test coverage](https://codecov.io/gh/r-lib/usethis/graph/badge.svg)](https://app.codecov.io/gh/r-lib/usethis) usethis is a workflow package: it automates repetitive tasks that arise during project setup and development, both for R packages and non-package projects. 
## Installation Install the released version of usethis from CRAN: ``` r install.packages("usethis") ``` Or install the development version from GitHub with: ``` r # install.packages("pak") pak::pak("r-lib/usethis") ``` ## Usage Most `use_*()` functions operate on the *active project*: literally, a directory on your computer. If you've just used usethis to create a new package or project, that will be the active project. Otherwise, usethis verifies that the current working directory is or is below a valid project directory and that becomes the active project. Use `proj_get()` or `proj_sitrep()` to manually query the project and [read more in the docs](https://usethis.r-lib.org/reference/proj_utils.html). A few usethis functions have no strong connections to projects and will expect you to provide a path. usethis is quite chatty, explaining what it's doing and assigning you tasks. `✔` indicates something usethis has done for you. `☐` indicates that you'll need to do some work yourself. Below is a quick look at how usethis can help to set up a package. But remember, many usethis functions are also applicable to analytical projects that are not packages. ``` r library(usethis) # Create a new package ------------------------------------------------- path <- file.path(tempdir(), "mypkg") create_package(path) #> ✔ Creating '/tmp/RtmpPZsquk/mypkg/'. #> ✔ Setting active project to "/private/tmp/RtmpPZsquk/mypkg". #> ✔ Creating 'R/'. #> ✔ Writing 'DESCRIPTION'. #> Package: mypkg #> Title: What the Package Does (One Line, Title Case) #> Version: 0.0.0.9000 #> Authors@R (parsed): #> * First Last [aut, cre] #> Description: What the package does (one paragraph). #> License: `use_mit_license()`, `use_gpl3_license()` or friends to pick a #> license #> Encoding: UTF-8 #> Roxygen: list(markdown = TRUE) #> RoxygenNote: 7.3.2 #> ✔ Writing 'NAMESPACE'. #> ✔ Setting active project to "<no active project>". # only needed since this session isn't interactive proj_activate(path) #> ✔ Setting active project to "/private/tmp/RtmpPZsquk/mypkg". #> ✔ Changing working directory to '/tmp/RtmpPZsquk/mypkg/' # Modify the description ---------------------------------------------- use_mit_license("My Name") #> ✔ Adding "MIT + file LICENSE" to 'License'. #> ✔ Writing 'LICENSE'. #> ✔ Writing 'LICENSE.md'. #> ✔ Adding "^LICENSE\\.md$" to '.Rbuildignore'. use_package("rmarkdown", "Suggests") #> ✔ Adding rmarkdown to 'Suggests' field in DESCRIPTION. #> ☐ Use `requireNamespace("rmarkdown", quietly = TRUE)` to test if rmarkdown is #> installed. #> ☐ Then directly refer to functions with `rmarkdown::fun()`. # Set up other files ------------------------------------------------- use_readme_md() #> ✔ Writing 'README.md'. #> ☐ Update 'README.md' to include installation instructions. use_news_md() #> ✔ Writing 'NEWS.md'. use_test("my-test") #> ✔ Adding testthat to 'Suggests' field in DESCRIPTION. #> ✔ Adding "3" to 'Config/testthat/edition'. #> ✔ Creating 'tests/testthat/'. #> ✔ Writing 'tests/testthat.R'. #> ✔ Writing 'tests/testthat/test-my-test.R'. #> ☐ Edit 'tests/testthat/test-my-test.R'. x <- 1 y <- 2 use_data(x, y) #> ✔ Adding R to 'Depends' field in DESCRIPTION. #> ✔ Creating 'data/'. #> ✔ Setting 'LazyData' to "true" in 'DESCRIPTION'. #> ✔ Saving "x" and "y" to "data/x.rda" and "data/y.rda". #> ☐ Document your data (see <https://r-pkgs.org/data.html>). # Use git ------------------------------------------------------------ use_git() #> ✔ Initialising Git repo. #> ✔ Adding ".Rproj.user", ".Rhistory", ".Rdata", ".httr-oauth", ".DS_Store", and #> ".quarto" to '.gitignore'. 
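# Possible next steps (illustrative only, not run above; the exact console
# output would depend on your GitHub and CI setup):
# use_github()                         # publish this repo to GitHub
# use_github_action("check-standard")  # add a standard R CMD check workflow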
``` ## Code of Conduct Please note that the usethis project is released with a [Contributor Code of Conduct](https://usethis.r-lib.org/CODE_OF_CONDUCT.html). By contributing to this project, you agree to abide by its terms. usethis/man/0000755000176200001440000000000014721145632012516 5ustar liggesusersusethis/man/use_github_links.Rd0000644000176200001440000000207514717524721016354 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github.R \name{use_github_links} \alias{use_github_links} \title{Use GitHub links in URL and BugReports} \usage{ use_github_links(overwrite = FALSE) } \arguments{ \item{overwrite}{By default, \code{use_github_links()} will not overwrite existing fields. Set to \code{TRUE} to overwrite existing links.} } \description{ Populates the \code{URL} and \code{BugReports} fields of a GitHub-using R package with appropriate links. The GitHub repo to link to is determined from the current project's GitHub remotes: \itemize{ \item If we are not working with a fork, this function expects \code{origin} to be a GitHub remote and the links target that repo. \item If we are working in a fork, this function expects to find two GitHub remotes: \code{origin} (the fork) and \code{upstream} (the fork's parent) remote. In an interactive session, the user can confirm which repo to use for the links. In a noninteractive session, links are formed using \code{upstream}. } } \examples{ \dontrun{ use_github_links() } } usethis/man/use_news_md.Rd0000644000176200001440000000112514651000165015305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/news.R \name{use_news_md} \alias{use_news_md} \title{Create a simple \code{NEWS.md}} \usage{ use_news_md(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ This creates a basic \code{NEWS.md} in the root directory. } \seealso{ The \href{https://r-pkgs.org/other-markdown.html}{other markdown files section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_github.Rd0000644000176200001440000000751714717524721015162 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github.R \name{use_github} \alias{use_github} \title{Connect a local repo with GitHub} \usage{ use_github( organisation = NULL, private = FALSE, visibility = c("public", "private", "internal"), protocol = git_protocol(), host = NULL ) } \arguments{ \item{organisation}{If supplied, the repo will be created under this organisation, instead of the login associated with the GitHub token discovered for this \code{host}. The user's role and the token's scopes must be such that you have permission to create repositories in this \code{organisation}.} \item{private}{If \code{TRUE}, creates a private repository.} \item{visibility}{Only relevant for organisation-owned repos associated with certain GitHub Enterprise products. The special "internal" \code{visibility} grants read permission to all organisation members, i.e. it's intermediate between "private" and "public", within GHE. When specified, \code{visibility} takes precedence over \code{private = TRUE/FALSE}.} \item{protocol}{One of "https" or "ssh"} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. 
If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \description{ \code{use_github()} takes a local project and: \itemize{ \item Checks that the initial state is good to go: \itemize{ \item Project is already a Git repo \item Current branch is the default branch, e.g. \code{main} or \code{master} \item No uncommitted changes \item No pre-existing \code{origin} remote } \item Creates an associated repo on GitHub \item Adds that GitHub repo to your local repo as the \code{origin} remote \item Makes an initial push to GitHub \item Calls \code{\link[=use_github_links]{use_github_links()}}, if the project is an R package \item Configures \code{origin/DEFAULT} to be the upstream branch of the local \code{DEFAULT} branch, e.g. \code{main} or \code{master} } See below for the authentication setup that is necessary for all of this to work. } \section{Git/GitHub Authentication}{ Many usethis functions, including those documented here, potentially interact with GitHub in two different ways: \itemize{ \item Via the GitHub REST API. Examples: create a repo, a fork, or a pull request. \item As a conventional Git remote. Examples: clone, fetch, or push. } Therefore two types of auth can happen and your credentials must be discoverable. Which credentials do we mean? \itemize{ \item A GitHub personal access token (PAT) must be discoverable by the gh package, which is used for GitHub operations via the REST API. See \code{\link[=gh_token_help]{gh_token_help()}} for more about getting and configuring a PAT. \item If you use the HTTPS protocol for Git remotes, your PAT is also used for Git operations, such as \verb{git push}. Usethis uses the gert package for this, so the PAT must be discoverable by gert. Generally gert and gh will discover and use the same PAT. This ability to "kill two birds with one stone" is why HTTPS + PAT is our recommended auth strategy for those new to Git and GitHub and PRs. \item If you use SSH remotes, your SSH keys must also be discoverable, in addition to your PAT. The public key must be added to your GitHub account. } Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \examples{ \dontrun{ pkgpath <- file.path(tempdir(), "testpkg") create_package(pkgpath) ## now, working inside "testpkg", initialize git repository use_git() ## create github repository and configure as git remote use_github() } } usethis/man/git_protocol.Rd0000644000176200001440000000312514651000165015503 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{git_protocol} \alias{git_protocol} \alias{use_git_protocol} \title{See or set the default Git protocol} \usage{ git_protocol() use_git_protocol(protocol) } \arguments{ \item{protocol}{One of "https" or "ssh"} } \value{ The protocol, either "https" or "ssh" } \description{ Git operations that address a remote use a so-called "transport protocol". usethis supports HTTPS and SSH. 
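As a quick illustration (a minimal sketch that anticipates the helpers and examples documented below), the protocol can be inspected and switched for the current session:

\preformatted{git_protocol()           # reports the protocol currently in force, e.g. "https"
use_git_protocol("ssh")  # newly configured GitHub remotes now get SSH-style URLs
}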
The protocol dictates the Git URL format used when usethis needs to configure the first GitHub remote for a repo: \itemize{ \item \code{protocol = "https"} implies \verb{https://github.com/<OWNER>/<REPO>.git} \item \code{protocol = "ssh"} implies \verb{git@github.com:<OWNER>/<REPO>.git} } Two helper functions are available: \itemize{ \item \code{git_protocol()} reveals the protocol "in force". As of usethis v2.0.0, this defaults to "https". You can change this for the duration of the R session with \code{use_git_protocol()}. Change the default for all R sessions with code like this in your \code{.Rprofile} (easily editable via \code{\link[=edit_r_profile]{edit_r_profile()}}): \if{html}{\out{
}}\preformatted{options(usethis.protocol = "ssh") }\if{html}{\out{
}} \item \code{use_git_protocol()} sets the Git protocol for the current R session } This protocol only affects the Git URL for newly configured remotes. All existing Git remote URLs are always respected, whether HTTPS or SSH. } \examples{ \dontrun{ git_protocol() use_git_protocol("ssh") git_protocol() use_git_protocol("https") git_protocol() } } usethis/man/use_rcpp.Rd0000644000176200001440000000147714651000165014627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rcpp.R \name{use_rcpp} \alias{use_rcpp} \alias{use_rcpp_armadillo} \alias{use_rcpp_eigen} \alias{use_c} \title{Use C, C++, RcppArmadillo, or RcppEigen} \usage{ use_rcpp(name = NULL) use_rcpp_armadillo(name = NULL) use_rcpp_eigen(name = NULL) use_c(name = NULL) } \arguments{ \item{name}{Either a string giving a file name (without directory) or \code{NULL} to take the name from the currently open file in RStudio.} } \description{ Adds infrastructure commonly needed when using compiled code: \itemize{ \item Creates \verb{src/} \item Adds required packages to \code{DESCRIPTION} \item May create an initial placeholder \code{.c} or \code{.cpp} file \item Creates \code{Makevars} and \code{Makevars.win} files (\code{use_rcpp_armadillo()} only) } } usethis/man/use_git_hook.Rd0000644000176200001440000000136314651000165015460 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_hook} \alias{use_git_hook} \title{Add a git hook} \usage{ use_git_hook(hook, script) } \arguments{ \item{hook}{Hook name. One of "pre-commit", "prepare-commit-msg", "commit-msg", "post-commit", "applypatch-msg", "pre-applypatch", "post-applypatch", "pre-rebase", "post-rewrite", "post-checkout", "post-merge", "pre-push", "pre-auto-gc".} \item{script}{Text of script to run} } \description{ Sets up a git hook using the specified script. Creates a hook directory if needed, and sets correct permissions on hook. } \seealso{ Other git helpers: \code{\link{use_git}()}, \code{\link{use_git_config}()}, \code{\link{use_git_ignore}()} } \concept{git helpers} usethis/man/use_tidy_eval.Rd0000644000176200001440000000130714717524721015647 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-deprecated.R \name{use_tidy_eval} \alias{use_tidy_eval} \title{Deprecated tidyverse functions} \usage{ use_tidy_eval() } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} \itemize{ \item \code{use_tidy_eval()} is deprecated because there's no longer a need to systematically import and re-export a large number of functions in order to use tidy evaluation. Instead, use \code{\link[=use_import_from]{use_import_from()}} to tactically import functions as you need them. } } \keyword{internal} usethis/man/use_description.Rd0000644000176200001440000000531714717524721016217 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/description.R \name{use_description} \alias{use_description} \alias{use_description_defaults} \title{Create or modify a DESCRIPTION file} \usage{ use_description(fields = list(), check_name = TRUE, roxygen = TRUE) use_description_defaults(package = NULL, roxygen = TRUE, fields = list()) } \arguments{ \item{fields}{A named list of fields to add to \code{DESCRIPTION}, potentially overriding default values. 
Default values are taken from the \code{"usethis.description"} option or the usethis package (in that order), and can be viewed with \code{use_description_defaults()}.} \item{check_name}{Whether to check if the name is valid for CRAN and throw an error if not.} \item{roxygen}{If \code{TRUE}, sets \code{RoxygenNote} to current roxygen2 version} \item{package}{Package name} } \description{ \code{use_description()} creates a \code{DESCRIPTION} file. Although mostly associated with R packages, a \code{DESCRIPTION} file can also be used to declare dependencies for a non-package project. Within such a project, \code{devtools::install_deps()} can then be used to install all the required packages. Note that, by default, \code{use_description()} checks for a CRAN-compliant package name. You can turn this off with \code{check_name = FALSE}. usethis consults the following sources, in this order, to set \code{DESCRIPTION} fields: \itemize{ \item \code{fields} argument of \code{\link[=create_package]{create_package()}} or \code{use_description()} \item \code{getOption("usethis.description")} \item Defaults built into usethis } The fields discovered via options or the usethis package can be viewed with \code{use_description_defaults()}. If you create a lot of packages, consider storing personalized defaults as a named list in an option named \code{"usethis.description"}. Here's an example of code to include in \code{.Rprofile}, which can be opened via \code{\link[=edit_r_profile]{edit_r_profile()}}: \if{html}{\out{
}}\preformatted{options( usethis.description = list( "Authors@R" = utils::person( "Jane", "Doe", email = "jane@example.com", role = c("aut", "cre"), comment = c(ORCID = "YOUR-ORCID-ID") ), Language = "es", License = "MIT + file LICENSE" ) ) }\if{html}{\out{
}} Prior to usethis v2.0.0, \code{getOption("devtools.desc")} was consulted for backwards compatibility, but now only the \code{"usethis.description"} option is supported. } \examples{ \dontrun{ use_description() use_description(fields = list(Language = "es")) use_description_defaults() } } \seealso{ The \href{https://r-pkgs.org/description.html}{description chapter} of \href{https://r-pkgs.org}{R Packages} } usethis/man/edit.Rd0000644000176200001440000000444414717524721013745 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/edit.R \name{edit} \alias{edit} \alias{edit_r_profile} \alias{edit_r_environ} \alias{edit_r_buildignore} \alias{edit_r_makevars} \alias{edit_rstudio_snippets} \alias{edit_rstudio_prefs} \alias{edit_git_config} \alias{edit_git_ignore} \alias{edit_pkgdown_config} \title{Open configuration files} \usage{ edit_r_profile(scope = c("user", "project")) edit_r_environ(scope = c("user", "project")) edit_r_buildignore() edit_r_makevars(scope = c("user", "project")) edit_rstudio_snippets( type = c("r", "markdown", "c_cpp", "css", "html", "java", "javascript", "python", "sql", "stan", "tex", "yaml") ) edit_rstudio_prefs() edit_git_config(scope = c("user", "project")) edit_git_ignore(scope = c("user", "project")) edit_pkgdown_config() } \arguments{ \item{scope}{Edit globally for the current \strong{user}, or locally for the current \strong{project}} \item{type}{Snippet type (case insensitive text).} } \value{ Path to the file, invisibly. } \description{ \itemize{ \item \code{edit_r_profile()} opens \code{.Rprofile} \item \code{edit_r_environ()} opens \code{.Renviron} \item \code{edit_r_makevars()} opens \code{.R/Makevars} \item \code{edit_git_config()} opens \code{.gitconfig} or \code{.git/config} \item \code{edit_git_ignore()} opens global (user-level) gitignore file and ensures its path is declared in your global Git config. \item \code{edit_pkgdown_config} opens the pkgdown YAML configuration file for the current Project. \item \code{edit_rstudio_snippets()} opens RStudio's snippet config for the given type. \item \code{edit_rstudio_prefs()} opens \link[=use_rstudio_preferences]{RStudio's preference file}. } } \details{ The \verb{edit_r_*()} functions consult R's notion of user's home directory. The \verb{edit_git_*()} functions (and \pkg{usethis} in general) inherit home directory behaviour from the \pkg{fs} package, which differs from R itself on Windows. The \pkg{fs} default is more conventional in terms of the location of user-level Git config files. See \code{\link[fs:path_expand]{fs::path_home()}} for more details. Files created by \code{edit_rstudio_snippets()} will \emph{mask}, not supplement, the built-in default snippets. If you like the built-in snippets, copy them and include with your custom snippets. } usethis/man/use_readme_rmd.Rd0000644000176200001440000000370214651000165015753 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/readme.R \name{use_readme_rmd} \alias{use_readme_rmd} \alias{use_readme_md} \title{Create README files} \usage{ use_readme_rmd(open = rlang::is_interactive()) use_readme_md(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? 
Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Creates skeleton README files with possible stubs for \itemize{ \item a high-level description of the project/package and its goals \item R code to install from GitHub, if GitHub usage detected \item a basic example } Use \code{Rmd} if you want a rich intermingling of code and output. Use \code{md} for a basic README. \code{README.Rmd} will be automatically added to \code{.Rbuildignore}. The resulting README is populated with default YAML frontmatter and R fenced code blocks (\code{md}) or chunks (\code{Rmd}). If you use \code{Rmd}, you'll still need to render it regularly, to keep \code{README.md} up-to-date. \code{devtools::build_readme()} is handy for this. You could also use GitHub Actions to re-render \code{README.Rmd} every time you push. An example workflow can be found in the \verb{examples/} directory here: \url{https://github.com/r-lib/actions/}. If the current project is a Git repo, then \code{use_readme_rmd()} automatically configures a pre-commit hook that helps keep \code{README.Rmd} and \code{README.md}, synchronized. The hook creates friction if you try to commit when \code{README.Rmd} has been edited more recently than \code{README.md}. If this hook causes more problems than it solves for you, it is implemented in \code{.git/hooks/pre-commit}, which you can modify or even delete. } \examples{ \dontrun{ use_readme_rmd() use_readme_md() } } \seealso{ The \href{https://r-pkgs.org/other-markdown.html}{other markdown files section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_github_labels.Rd0000644000176200001440000000737714717524721016510 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-labels.R \name{use_github_labels} \alias{use_github_labels} \alias{use_tidy_github_labels} \alias{tidy_labels} \alias{tidy_labels_rename} \alias{tidy_label_colours} \alias{tidy_label_descriptions} \title{Manage GitHub issue labels} \usage{ use_github_labels( labels = character(), rename = character(), colours = character(), descriptions = character(), delete_default = FALSE ) use_tidy_github_labels() tidy_labels() tidy_labels_rename() tidy_label_colours() tidy_label_descriptions() } \arguments{ \item{labels}{A character vector giving labels to add.} \item{rename}{A named vector with names giving old names and values giving new names.} \item{colours, descriptions}{Named character vectors giving hexadecimal colours (like \code{e02a2a}) and longer descriptions. The names should match label names, and anything unmatched will be left unchanged. If you create a new label, and don't supply colours, it will be given a random colour.} \item{delete_default}{If \code{TRUE}, removes GitHub default labels that do not appear in the \code{labels} vector and that do not have associated issues.} } \description{ \code{use_github_labels()} can create new labels, update colours and descriptions, and optionally delete GitHub's default labels (if \code{delete_default = TRUE}). It will never delete labels that have associated issues. \code{use_tidy_github_labels()} calls \code{use_github_labels()} with tidyverse conventions powered by \code{tidy_labels()}, \code{tidy_labels_rename()}, \code{tidy_label_colours()} and \code{tidy_label_descriptions()}. \subsection{tidyverse label usage}{ Labels are used as part of the issue-triage process, designed to minimise the time spent re-reading issues. 
The absence of a label indicates that an issue is new, and has yet to be triaged. There are four mutually exclusive labels that indicate the overall "type" of issue: \itemize{ \item \code{bug}: an unexpected problem or unintended behavior. \item \code{documentation}: requires changes to the docs. \item \code{feature}: feature requests and enhancements. \item \code{upkeep}: general package maintenance work that makes future development easier. } Then there are five labels that are needed in most repositories: \itemize{ \item \verb{breaking change}: issue/PR will require a breaking change so should not be included in patch releases. \item \code{reprex} indicates that an issue does not have a minimal reproducible example, and that a reply has been sent requesting one from the user. \item \verb{good first issue} indicates a good issue for first-time contributors. \item \verb{help wanted} indicates that a maintainer wants help on an issue. \item \code{wip} indicates that someone is working on it or has promised to. } Finally, most larger repos will accumulate their own labels for specific areas of functionality. For example, usethis has labels like "description", "paths", "readme", because time has shown these to be common sources of problems. These labels are helpful for grouping issues so that you can tackle related problems at the same time. Repo-specific issues should have a grey background (\verb{#eeeeee}) and an emoji. This keeps the issue page visually harmonious while still giving enough variation to easily distinguish different types of label. } } \examples{ \dontrun{ # typical use in, e.g., a new tidyverse project use_github_labels(delete_default = TRUE) # create labels without changing colours/descriptions use_github_labels( labels = c("foofy", "foofier", "foofiest"), colours = NULL, descriptions = NULL ) # change descriptions without changing names/colours use_github_labels( labels = NULL, colours = NULL, descriptions = c("foofiest" = "the foofiest issue you ever saw") ) } } usethis/man/use_coverage.Rd0000644000176200001440000000125614651000165015451 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/coverage.R \name{use_coverage} \alias{use_coverage} \alias{use_covr_ignore} \title{Test coverage} \usage{ use_coverage(type = c("codecov", "coveralls"), repo_spec = NULL) use_covr_ignore(files) } \arguments{ \item{type}{Which web service to use.} \item{repo_spec}{Optional GitHub repo specification in this form: \code{owner/repo}. This can usually be inferred from the GitHub remotes of the active project.} \item{files}{Character vector of file globs.} } \description{ Adds test coverage reporting to a package, using either Codecov (\verb{https://codecov.io}) or Coveralls (\verb{https://coveralls.io}). } usethis/man/use_latest_dependencies.Rd0000644000176200001440000000137514651000165017662 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/latest-dependencies.R \name{use_latest_dependencies} \alias{use_latest_dependencies} \title{Use "latest" versions of all dependencies} \usage{ use_latest_dependencies(overwrite = TRUE, source = c("CRAN", "local")) } \arguments{ \item{overwrite}{By default (\code{TRUE}), all dependencies will be modified. 
Set to \code{FALSE} to only modify dependencies without version specifications.} \item{source}{Use "CRAN" or "local" package versions.} } \description{ Pins minimum versions of all \code{Imports} and \code{Depends} dependencies to latest ones (as determined by \code{source}). Useful for the tidyverse package, but should otherwise be used with extreme care. } \keyword{internal} usethis/man/use_logo.Rd0000644000176200001440000000154114651000165014613 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/logo.R \name{use_logo} \alias{use_logo} \title{Use a package logo} \usage{ use_logo(img, geometry = "240x278", retina = TRUE) } \arguments{ \item{img}{The path to an existing image file} \item{geometry}{a \link[magick:geometry]{magick::geometry} string specifying size. The default assumes that you have a hex logo using spec from \url{http://hexb.in/sticker.html}.} \item{retina}{\code{TRUE}, the default, scales the image on the README, assuming that geometry is double the desired size.} } \description{ This function helps you use a logo in your package: \itemize{ \item Enforces a specific size \item Stores logo image file at \code{man/figures/logo.png} \item Produces the markdown text you need in README to include the logo } } \examples{ \dontrun{ use_logo("usethis.png") } } usethis/man/use_blank_slate.Rd0000644000176200001440000000155714651000165016141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rstudio.R \name{use_blank_slate} \alias{use_blank_slate} \title{Don't save/load user workspace between sessions} \usage{ use_blank_slate(scope = c("user", "project")) } \arguments{ \item{scope}{Edit globally for the current \strong{user}, or locally for the current \strong{project}} } \description{ R can save and reload the user's workspace between sessions via an \code{.RData} file in the current directory. However, long-term reproducibility is enhanced when you turn this feature off and clear R's memory at every restart. Starting with a blank slate provides timely feedback that encourages the development of scripts that are complete and self-contained. More detail can be found in the blog post \href{https://www.tidyverse.org/blog/2017/12/workflow-vs-script/}{Project-oriented workflow}. } usethis/man/use_course_details.Rd0000644000176200001440000001601014651000165016655 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/course.R \name{use_course_details} \alias{use_course_details} \alias{tidy_download} \alias{tidy_unzip} \alias{create_download_url} \title{Helpers to download and unpack a ZIP file} \usage{ tidy_download(url, destdir = getwd()) tidy_unzip(zipfile, cleanup = FALSE) create_download_url(url) } \arguments{ \item{url}{A GitHub, DropBox, or Google Drive URL. \itemize{ \item For \code{create_download_url()}: A URL copied from a web browser. \item For \code{tidy_download()}: A download link for a ZIP file, possibly behind a shortlink or other redirect. \code{create_download_url()} can be helpful for creating this URL from typical browser URLs. }} \item{destdir}{Path to existing local directory where the ZIP file will be stored. Defaults to current working directory, but note that \code{\link[=use_course]{use_course()}} has different default behavior.} \item{zipfile}{Path to local ZIP file.} \item{cleanup}{Whether to delete the ZIP file after unpacking. 
In an interactive session, \code{cleanup = NA} leads to asking the user if they want to delete or keep the ZIP file.} } \description{ Details on the internal and helper functions that power \code{\link[=use_course]{use_course()}} and \code{\link[=use_zip]{use_zip()}}. Only \code{create_download_url()} is exported. } \section{tidy_download()}{ \if{html}{\out{
}}\preformatted{# how it's used inside use_course() tidy_download( # url has been processed with internal helper normalize_url() url, # conspicuous_place() = `getOption('usethis.destdir')` or desktop or home # directory or working directory destdir = destdir \%||\% conspicuous_place() ) }\if{html}{\out{
}} Special-purpose function to download a ZIP file and automatically determine the file name, which often determines the folder name after unpacking. Developed with DropBox and GitHub as primary targets, possibly via shortlinks. Both platforms offer a way to download an entire folder or repo as a ZIP file, with information about the original folder or repo transmitted in the \code{Content-Disposition} header. In the absence of this header, a filename is generated from the input URL. In either case, the filename is sanitized. Returns the path to the downloaded ZIP file, invisibly. \code{tidy_download()} is set up to retry after a download failure. In an interactive session, it asks for the user's consent. All retries use a longer connect timeout. \subsection{DropBox}{ To make a folder available for ZIP download, create a shared link for it: \itemize{ \item \url{https://help.dropbox.com/share/create-and-share-link} } A shared link will have this form: \if{html}{\out{
}}\preformatted{https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0 }\if{html}{\out{
}} Replace the \code{dl=0} at the end with \code{dl=1} to create a download link: \if{html}{\out{
}}\preformatted{https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1 }\if{html}{\out{
}} You can use \code{create_download_url()} to do this conversion. This download link (or a shortlink that points to it) is suitable as input for \code{tidy_download()}. After one or more redirections, this link will eventually lead to a download URL. For more details, see \url{https://help.dropbox.com/share/force-download} and \url{https://help.dropbox.com/sync/download-entire-folders}. } \subsection{GitHub}{ Click on the repo's "Clone or download" button to reveal a "Download ZIP" button. Capture this URL, which will have this form: \if{html}{\out{
}}\preformatted{https://github.com/r-lib/usethis/archive/main.zip }\if{html}{\out{
}} This download link (or a shortlink that points to it) is suitable as input for \code{tidy_download()}. After one or more redirections, this link will eventually lead to a download URL. Here are other links that also lead to ZIP download, albeit with a different filenaming scheme (REF could be a branch name, a tag, or a SHA): \if{html}{\out{
}}\preformatted{https://github.com/r-lib/usethis/zipball/HEAD https://api.github.com/repos/r-lib/rematch2/zipball/REF https://api.github.com/repos/r-lib/rematch2/zipball/HEAD https://api.github.com/repos/r-lib/usethis/zipball/REF }\if{html}{\out{
}} You can use \code{create_download_url()} to create the "Download ZIP" URL from a typical GitHub browser URL. } \subsection{Google Drive}{ To our knowledge, it is not possible to download a Google Drive folder as a ZIP archive. It is however possible to share a ZIP file stored on Google Drive. To get its URL, click on "Get the shareable link" (within the "Share" menu). This URL doesn't allow for direct download, as it's designed to be processed in a web browser first. Such a sharing link looks like: \if{html}{\out{
}}\preformatted{https://drive.google.com/open?id=123456789xxyyyzzz }\if{html}{\out{
}} To be able to get the URL suitable for direct download, you need to extract the "id" element from the URL and include it in this URL format: \if{html}{\out{
}}\preformatted{https://drive.google.com/uc?export=download&id=123456789xxyyyzzz }\if{html}{\out{
}} Use \code{create_download_url()} to perform this transformation automatically. } } \section{tidy_unzip()}{ Special-purpose function to unpack a ZIP file and (attempt to) create the directory structure most people want. When unpacking an archive, it is easy to get one more or one less level of nesting than you expected. It's especially important to finesse the directory structure here: we want the same local result when unzipping the same content from either GitHub or DropBox ZIP files, which pack things differently. Here is the intent: \itemize{ \item If the ZIP archive \code{foo.zip} does not contain a single top-level directory, i.e. it is packed as "loose parts", unzip into a directory named \code{foo}. Typical of DropBox ZIP files. \item If the ZIP archive \code{foo.zip} has a single top-level directory (which, by the way, is not necessarily called "foo"), unpack into said directory. Typical of GitHub ZIP files. } Returns path to the directory holding the unpacked files, invisibly. \strong{DropBox:} The ZIP files produced by DropBox are special. The file list tends to contain a spurious directory \code{"/"}, which we ignore during unzip. Also, if the directory is a Git repo and/or RStudio Project, we unzip-ignore various hidden files, such as \code{.RData}, \code{.Rhistory}, and those below \verb{.git/} and \code{.Rproj.user}. } \examples{ \dontrun{ tidy_download("https://github.com/r-lib/rematch2/archive/main.zip") tidy_unzip("rematch2-main.zip") } # GitHub create_download_url("https://github.com/r-lib/usethis") create_download_url("https://github.com/r-lib/usethis/issues") # DropBox create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0") # Google Drive create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz") create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz/view") } \keyword{internal} usethis/man/use_git.Rd0000644000176200001440000000113214651000165014432 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git} \alias{use_git} \title{Initialise a git repository} \usage{ use_git(message = "Initial commit") } \arguments{ \item{message}{Message to use for first commit.} } \description{ \code{use_git()} initialises a Git repository and adds important files to \code{.gitignore}. If user consents, it also makes an initial commit. } \examples{ \dontrun{ use_git() } } \seealso{ Other git helpers: \code{\link{use_git_config}()}, \code{\link{use_git_hook}()}, \code{\link{use_git_ignore}()} } \concept{git helpers} usethis/man/use_data_table.Rd0000644000176200001440000000167614651000165015744 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data-table.R \name{use_data_table} \alias{use_data_table} \title{Prepare for importing data.table} \usage{ use_data_table() } \description{ \code{use_data_table()} imports the \code{data.table()} function from the data.table package, as well as several important symbols: \verb{:=}, \code{.SD}, \code{.BY}, \code{.N}, \code{.I}, \code{.GRP}, \code{.NGRP}, \code{.EACHI}. This is a minimal setup and you can learn much more in the "Importing data.table" vignette: \verb{https://rdatatable.gitlab.io/data.table/articles/datatable-importing.html}. In addition to importing these functions, \code{use_data_table()} also blocks the usage of data.table in the \code{Depends} field of the \code{DESCRIPTION} file; \code{data.table} should be used as an \emph{imported} or \emph{suggested} package only. 
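As a rough sketch (hypothetical and for illustration only; the file name and the exact roxygen directives used by the real template may differ), the imports described above amount to something like the following, placed in a file such as \code{R/utils-data-table.R}:

\preformatted{#' @importFrom data.table data.table := .SD .BY .N .I .GRP .NGRP .EACHI
NULL
}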
See this \href{https://github.com/Rdatatable/data.table/issues/3076}{discussion}. } usethis/man/use_standalone.Rd0000644000176200001440000000607414651514262016021 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/use_standalone.R \name{use_standalone} \alias{use_standalone} \title{Use a standalone file from another repo} \usage{ use_standalone(repo_spec, file = NULL, ref = NULL, host = NULL) } \arguments{ \item{repo_spec}{A string identifying the GitHub repo in one of these forms: \itemize{ \item Plain \code{OWNER/REPO} spec \item Browser URL, such as \code{"https://github.com/OWNER/REPO"} \item HTTPS Git URL, such as \code{"https://github.com/OWNER/REPO.git"} \item SSH Git URL, such as \code{"git@github.com:OWNER/REPO.git"} }} \item{file}{Name of standalone file. The \verb{standalone-} prefix and file extension are optional. If omitted, will allow you to choose from the standalone files offered by that repo.} \item{ref}{The name of a branch, tag, or commit. By default, the file at \code{path} will be copied from its current state in the repo's default branch. This is extracted from \code{repo_spec} when user provides a URL.} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If \code{repo_spec} is a URL, \code{host} is extracted from that. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \description{ A "standalone" file implements a minimum set of functionality in such a way that it can be copied into another package. \code{use_standalone()} makes it easy to get such a file into your own repo. It always overwrites an existing standalone file of the same name, making it easy to update previously imported code. } \section{Supported fields}{ A standalone file has YAML frontmatter that provides additional information, such as where the file originates from and when it was last updated. Here is an example: \if{html}{\out{
}}\preformatted{--- repo: r-lib/rlang file: standalone-types-check.R last-updated: 2023-03-07 license: https://unlicense.org dependencies: standalone-obj-type.R imports: rlang (>= 1.1.0) --- }\if{html}{\out{
}} Two of these fields are consulted by \code{use_standalone()}: \itemize{ \item \code{dependencies}: A file or a list of files in the same repo that the standalone file depends on. These files are retrieved automatically by \code{use_standalone()}. \item \code{imports}: A package or list of packages that the standalone file depends on. A minimal version may be specified in parentheses, e.g. \verb{rlang (>= 1.0.0)}. These dependencies are passed to \code{\link[=use_package]{use_package()}} to ensure they are included in the \verb{Imports:} field of the \code{DESCRIPTION} file. } Note that lists are specified with standard YAML syntax, using square brackets, for example: \verb{imports: [rlang (>= 1.0.0), purrr]}. } \examples{ \dontrun{ use_standalone("r-lib/rlang", file = "types-check") use_standalone("r-lib/rlang", file = "types-check", ref = "standalone-dep") } } usethis/man/proj_sitrep.Rd0000644000176200001440000000157214651000165015343 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sitrep.R \name{proj_sitrep} \alias{proj_sitrep} \title{Report working directory and usethis/RStudio project} \usage{ proj_sitrep() } \value{ A named list, with S3 class \code{sitrep} (for printing purposes), reporting current working directory, active usethis project, and active RStudio Project } \description{ \code{proj_sitrep()} reports \itemize{ \item current working directory \item the active usethis project \item the active RStudio Project } Call this function if things seem weird and you're not sure what's wrong or how to fix it. Usually, all three of these should coincide (or be unset) and \code{proj_sitrep()} provides suggested commands for getting back to this happy state. } \examples{ proj_sitrep() } \seealso{ Other project functions: \code{\link{proj_utils}} } \concept{project functions} usethis/man/use_cran_comments.Rd0000644000176200001440000000145414651000165016506 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/cran.R \name{use_cran_comments} \alias{use_cran_comments} \title{CRAN submission comments} \usage{ use_cran_comments(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Creates \code{cran-comments.md}, a template for your communications with CRAN when submitting a package. The goal is to clearly communicate the steps you have taken to check your package on a wide range of operating systems. If you are submitting an update to a package that is used by other packages, you also need to summarize the results of your \link[=use_revdep]{reverse dependency checks}. } usethis/man/use_test_helper.Rd0000644000176200001440000000204314717524721016203 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/r.R \name{use_test_helper} \alias{use_test_helper} \title{Create or edit a test helper file} \usage{ use_test_helper(name = NULL, open = rlang::is_interactive()) } \arguments{ \item{name}{Can be used to specify the optional "SLUG" in \code{tests/testthat/helper-SLUG.R}.} \item{open}{Whether to open the file for interactive editing.} } \description{ This function creates (or opens) a test helper file, typically \code{tests/testthat/helper.R}. Test helper files are executed at the beginning of every automated test run and are also executed by \code{\link[pkgload:load_all]{load_all()}}. 
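For example (a purely illustrative sketch; the file contents and the helper name are hypothetical), such a file might define a small expectation shared by all tests:

\preformatted{# tests/testthat/helper.R
expect_has_column <- function(df, col) {
  testthat::expect_true(col \%in\% names(df))
}
}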
A helper file is a great place to define test helper functions for use throughout your test suite, such as a custom expectation. } \examples{ \dontrun{ use_test_helper() use_test_helper("mocks") } } \seealso{ \itemize{ \item \code{\link[=use_test]{use_test()}} to create a test file. \item The testthat vignette on special files \code{vignette("special-files", package = "testthat")}. } } usethis/man/use_data.Rd0000644000176200001440000000472614717524721014610 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \name{use_data} \alias{use_data} \alias{use_data_raw} \title{Create package data} \usage{ use_data( ..., internal = FALSE, overwrite = FALSE, compress = "bzip2", version = 3, ascii = FALSE ) use_data_raw(name = "DATASET", open = rlang::is_interactive()) } \arguments{ \item{...}{Unquoted names of existing objects to save.} \item{internal}{If \code{FALSE}, saves each object in its own \code{.rda} file in the \verb{data/} directory. These data files bypass the usual export mechanism and are available whenever the package is loaded (or via \code{\link[=data]{data()}} if \code{LazyData} is not true). If \code{TRUE}, stores all objects in a single \code{R/sysdata.rda} file. Objects in this file follow the usual export rules. Note that this means they will be exported if you are using the common \code{exportPattern()} rule which exports all objects except for those that start with \code{.}.} \item{overwrite}{By default, \code{use_data()} will not overwrite existing files. If you really want to do so, set this to \code{TRUE}.} \item{compress}{Choose the type of compression used by \code{\link[=save]{save()}}. Should be one of "gzip", "bzip2", or "xz".} \item{version}{The serialization format version to use. The default, 3, can only be read by R versions 3.5.0 and higher. For R 1.4.0 to 3.5.3, use version 2.} \item{ascii}{if \code{TRUE}, an ASCII representation of the data is written. The default value of \code{ascii} is \code{FALSE} which leads to a binary file being written. If \code{NA} and \code{version >= 2}, a different ASCII representation is used which writes double/complex numbers as binary fractions.} \item{name}{Name of the dataset to be prepared for inclusion in the package.} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ \code{use_data()} makes it easy to save package data in the correct format. I recommend you save scripts that generate package data in \code{data-raw}: use \code{use_data_raw()} to set it up. You also need to document exported datasets. } \examples{ \dontrun{ x <- 1:10 y <- 1:100 use_data(x, y) # For external use use_data(x, y, internal = TRUE) # For internal use } \dontrun{ use_data_raw("daisy") } } \seealso{ The \href{https://r-pkgs.org/data.html}{data chapter} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/write-this.Rd0000644000176200001440000000365414651000165015105 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/write.R \name{write-this} \alias{write-this} \alias{write_union} \alias{write_over} \title{Write into or over a file} \usage{ write_union(path, lines, quiet = FALSE) write_over(path, lines, quiet = FALSE, overwrite = FALSE) } \arguments{ \item{path}{Path to target file. It is created if it does not exist, but the parent directory must exist.} \item{lines}{Character vector of lines. 
For \code{write_union()}, these are lines to add to the target file, if not already present. For \code{write_over()}, these are the exact lines desired in the target file.} \item{quiet}{Logical. Whether to message about what is happening.} \item{overwrite}{Force overwrite of existing file?} } \value{ Logical indicating whether a write occurred, invisibly. } \description{ Helpers to write into or over a new or pre-existing file. Designed mostly for internal use. File is written with UTF-8 encoding. } \section{Functions}{ \itemize{ \item \code{write_union()}: writes lines to a file, taking the union of what's already there, if anything, and some new lines. Note, there is no explicit promise about the line order. Designed to modify simple config files like \code{.Rbuildignore} and \code{.gitignore}. \item \code{write_over()}: writes a file with specific lines, creating it if necessary or overwriting existing, if proposed contents are not identical and user is available to give permission. }} \examples{ \dontshow{ .old_wd <- setwd(tempdir()) } write_union("a_file", letters[1:3]) readLines("a_file") write_union("a_file", letters[1:5]) readLines("a_file") write_over("another_file", letters[1:3]) readLines("another_file") write_over("another_file", letters[1:3]) \dontrun{ ## will error if user isn't present to approve the overwrite write_over("another_file", letters[3:1]) } ## clean up file.remove("a_file", "another_file") \dontshow{ setwd(.old_wd) } } \keyword{internal} usethis/man/use_roxygen_md.Rd0000644000176200001440000000114414651000165016025 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/roxygen.R \name{use_roxygen_md} \alias{use_roxygen_md} \title{Use roxygen2 with markdown} \usage{ use_roxygen_md(overwrite = FALSE) } \arguments{ \item{overwrite}{Whether to overwrite an existing \code{Roxygen} field in \code{DESCRIPTION} with \code{"list(markdown = TRUE)"}.} } \description{ If you are already using roxygen2, but not with markdown, you'll need to use \href{https://roxygen2md.r-lib.org}{roxygen2md} to convert existing Rd expressions to markdown. The conversion is not perfect, so make sure to check the results. } usethis/man/use_make.Rd0000644000176200001440000000054214651000165014570 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/make.R \name{use_make} \alias{use_make} \title{Create Makefile} \usage{ use_make() } \description{ \code{use_make()} adds a basic Makefile to the project root directory. } \seealso{ The \href{https://www.gnu.org/software/make/manual/html_node/}{documentation for GNU Make}. } usethis/man/use_template.Rd0000644000176200001440000000423214651000165015466 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/template.R \name{use_template} \alias{use_template} \title{Use a usethis-style template} \usage{ use_template( template, save_as = template, data = list(), ignore = FALSE, open = FALSE, package = "usethis" ) } \arguments{ \item{template}{Path to template file relative to \verb{templates/} directory within \code{package}; see details.} \item{save_as}{Path of file to create, relative to root of active project. Defaults to \code{template}} \item{data}{A list of data passed to the template.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} \item{open}{Open the newly created file for editing? 
Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} \item{package}{Name of the package where the template is found.} } \value{ A logical vector indicating if file was modified. } \description{ Creates a file from data and a template found in a package. Provides control over file name, the addition to \code{.Rbuildignore}, and opening the file for inspection. } \details{ This function can be used as the engine for a templating function in other packages. The \code{template} argument is used along with the \code{package} argument to derive the path to your template file; it will be expected at \code{fs::path_package(package = package, "templates", template)}. We use \code{fs::path_package()} instead of \code{base::system.file()} so that path construction works even in a development workflow, e.g., works with \code{devtools::load_all()} or \code{pkgload::load_all()}. \emph{Note this describes the behaviour of \code{fs::path_package()} in fs v1.2.7.9001 and higher.} To interpolate your data into the template, supply a list using the \code{data} argument. Internally, this function uses \code{\link[whisker:whisker.render]{whisker::whisker.render()}} to combine your template file with your data. } \examples{ \dontrun{ # Note: running this will write `NEWS.md` to your working directory use_template( template = "NEWS.md", data = list(Package = "acme", Version = "1.2.3"), package = "usethis" ) } } usethis/man/use_rstudio.Rd0000644000176200001440000000254514651000165015351 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rstudio.R \name{use_rstudio} \alias{use_rstudio} \title{Add RStudio Project infrastructure} \usage{ use_rstudio(line_ending = c("posix", "windows"), reformat = TRUE) } \arguments{ \item{line_ending}{Line ending} \item{reformat}{If \code{TRUE}, the \code{.Rproj} is set up with common options that reformat files on save: adding a trailing newline, trimming trailing whitespace, and setting the line-ending. This is best practice for new projects. If \code{FALSE}, these options are left unset, which is more appropriate when you're contributing to someone else's project that does not have its own \code{.Rproj} file.} } \description{ It is likely that you want to use \code{\link[=create_project]{create_project()}} or \code{\link[=create_package]{create_package()}} instead of \code{use_rstudio()}! Both \verb{create_*()} functions can add RStudio Project infrastructure to a pre-existing project or package. \code{use_rstudio()} is mostly for internal use or for those creating a usethis-like package for their organization. It does the following in the current project, often after executing \code{proj_set(..., force = TRUE)}: \itemize{ \item Creates an \code{.Rproj} file \item Adds RStudio files to \code{.gitignore} \item Adds RStudio files to \code{.Rbuildignore}, if project is a package } } usethis/man/proj_activate.Rd0000644000176200001440000000103514651000165015627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/proj.R \name{proj_activate} \alias{proj_activate} \title{Activate a project} \usage{ proj_activate(path) } \arguments{ \item{path}{Project directory} } \value{ Single logical value indicating if current session is modified. } \description{ Activates a project in the usethis, R, and (if relevant) RStudio senses. If you are in RStudio, this will open a new RStudio session. 
If not, it will change the working directory and \link[=proj_set]{active project}. } usethis/man/usethis-package.Rd0000644000176200001440000000244414651000165016057 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-package.R \docType{package} \name{usethis-package} \alias{usethis} \alias{usethis-package} \title{usethis: Automate Package and Project Setup} \description{ \if{html}{\figure{logo.png}{options: style='float: right' alt='logo' width='120'}} Automate package and project setup tasks that are otherwise performed manually. This includes setting up unit testing, test coverage, continuous integration, Git, 'GitHub', licenses, 'Rcpp', 'RStudio' projects, and more. } \seealso{ Useful links: \itemize{ \item \url{https://usethis.r-lib.org} \item \url{https://github.com/r-lib/usethis} \item Report bugs at \url{https://github.com/r-lib/usethis/issues} } } \author{ \strong{Maintainer}: Jennifer Bryan \email{jenny@posit.co} (\href{https://orcid.org/0000-0002-6983-2759}{ORCID}) Authors: \itemize{ \item Hadley Wickham \email{hadley@posit.co} (\href{https://orcid.org/0000-0003-4757-117X}{ORCID}) \item Malcolm Barrett \email{malcolmbarrett@gmail.com} (\href{https://orcid.org/0000-0003-0299-5825}{ORCID}) \item Andy Teucher \email{andy.teucher@posit.co} (\href{https://orcid.org/0000-0002-7840-692X}{ORCID}) } Other contributors: \itemize{ \item Posit Software, PBC [copyright holder, funder] } } \keyword{internal} usethis/man/use_git_ignore.Rd0000644000176200001440000000104514651000165016000 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_ignore} \alias{use_git_ignore} \title{Tell Git to ignore files} \usage{ use_git_ignore(ignores, directory = ".") } \arguments{ \item{ignores}{Character vector of ignores, specified as file globs.} \item{directory}{Directory relative to active project to set ignores} } \description{ Tell Git to ignore files } \seealso{ Other git helpers: \code{\link{use_git}()}, \code{\link{use_git_config}()}, \code{\link{use_git_hook}()} } \concept{git helpers} usethis/man/git_branch_default.Rd0000644000176200001440000000101714717524721016615 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-deprecated.R \name{git_branch_default} \alias{git_branch_default} \title{Deprecated Git functions} \usage{ git_branch_default() } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} \code{git_branch_default()} has been replaced by \code{\link[=git_default_branch]{git_default_branch()}}. } \keyword{internal} usethis/man/edit_file.Rd0000644000176200001440000000203614651000165014723 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/edit.R \name{edit_file} \alias{edit_file} \alias{edit_template} \title{Open file for editing} \usage{ edit_file(path, open = rlang::is_interactive()) edit_template(template = NULL, open = rlang::is_interactive()) } \arguments{ \item{path}{Path to target file.} \item{open}{Whether to open the file for interactive editing.} \item{template}{The target template file. If not specified, existing template files are offered for interactive selection.} } \value{ Target path, invisibly. 
} \description{ Opens a file for editing in RStudio, if that is the active environment, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise. If the file does not exist, it is created. If the parent directory does not exist, it is also created. \code{edit_template()} specifically opens templates in \code{inst/templates} for use with \code{\link[=use_template]{use_template()}}. } \examples{ \dontrun{ edit_file("DESCRIPTION") edit_file("~/.gitconfig") } } \keyword{internal} usethis/man/use_import_from.Rd0000644000176200001440000000215614651000165016213 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/use_import_from.R \name{use_import_from} \alias{use_import_from} \title{Import a function from another package} \usage{ use_import_from(package, fun, load = is_interactive()) } \arguments{ \item{package}{Package name} \item{fun}{A vector of function names} \item{load}{Logical. Re-load with \code{\link[pkgload:load_all]{pkgload::load_all()}}?} } \value{ Invisibly, \code{TRUE} if the package document has changed, \code{FALSE} if not. } \description{ \code{use_import_from()} imports a function from another package by adding the roxygen2 \verb{@importFrom} tag to the package-level documentation (which can be created with \code{\link[=use_package_doc]{use_package_doc()}}). Importing a function from another package allows you to refer to it without a namespace (e.g., \code{fun()} instead of \code{package::fun()}). \code{use_import_from()} also re-documents the NAMESPACE and re-loads the current package. This ensures that \code{fun} is immediately available in your development session. } \examples{ \dontrun{ use_import_from("glue", "glue") } } usethis/man/browse-this.Rd0000644000176200001440000000550714717524721015263 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/browse.R \name{browse-this} \alias{browse-this} \alias{browse_package} \alias{browse_project} \alias{browse_github} \alias{browse_github_issues} \alias{browse_github_pulls} \alias{browse_github_actions} \alias{browse_circleci} \alias{browse_cran} \title{Visit important project-related web pages} \usage{ browse_package(package = NULL) browse_project() browse_github(package = NULL) browse_github_issues(package = NULL, number = NULL) browse_github_pulls(package = NULL, number = NULL) browse_github_actions(package = NULL) browse_circleci(package = NULL) browse_cran(package = NULL) } \arguments{ \item{package}{Name of package. If \code{NULL}, the active project is targeted, regardless of whether it's an R package or not.} \item{number}{Optional, to specify an individual GitHub issue or pull request. Can be a number or \code{"new"}.} } \description{ These functions take you to various web pages associated with a project (often, an R package) and return the target URL(s) invisibly. To form these URLs we consult: \itemize{ \item Git remotes configured for the active project that appear to be hosted on a GitHub deployment \item DESCRIPTION file for the active project or the specified \code{package}. The DESCRIPTION file is sought first in the local package library and then on CRAN. 
\item Fixed templates: \itemize{ \item Circle CI: \verb{https://circleci.com/gh/\{OWNER\}/\{PACKAGE\}} \item CRAN landing page: \verb{https://cran.r-project.org/package=\{PACKAGE\}} \item GitHub mirror of a CRAN package: \verb{https://github.com/cran/\{PACKAGE\}} Templated URLs aren't checked for existence, so there is no guarantee there will be content at the destination. } } } \details{ \itemize{ \item \code{browse_package()}: Assembles a list of URLs and lets user choose one to visit in a web browser. In a non-interactive session, returns all discovered URLs. \item \code{browse_project()}: Thin wrapper around \code{browse_package()} that always targets the active usethis project. \item \code{browse_github()}: Visits a GitHub repository associated with the project. In the case of a fork, you might be asked to specify if you're interested in the source repo or your fork. \item \code{browse_github_issues()}: Visits the GitHub Issues index or one specific issue. \item \code{browse_github_pulls()}: Visits the GitHub Pull Request index or one specific pull request. \item \code{browse_circleci()}: Visits the project's page on \href{https://circleci.com}{Circle CI}. \item \code{browse_cran()}: Visits the package on CRAN, via the canonical URL. } } \examples{ # works on the active project # browse_project() browse_package("httr") browse_github("gh") browse_github_issues("fs") browse_github_issues("fs", 1) browse_github_pulls("curl") browse_github_pulls("curl", 183) browse_cran("MASS") } usethis/man/use_testthat.Rd0000644000176200001440000000162014651000165015511 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test.R \name{use_testthat} \alias{use_testthat} \title{Sets up overall testing infrastructure} \usage{ use_testthat(edition = NULL, parallel = FALSE) } \arguments{ \item{edition}{testthat edition to use. Defaults to the latest edition, i.e. the major version number of the currently installed testthat.} \item{parallel}{Should tests be run in parallel? This feature appeared in testthat 3.0.0; see \url{https://testthat.r-lib.org/articles/parallel.html} for details and caveats.} } \description{ Creates \verb{tests/testthat/}, \code{tests/testthat.R}, and adds the testthat package to the Suggests field. Learn more in \url{https://r-pkgs.org/testing-basics.html} } \examples{ \dontrun{ use_testthat() use_test() use_test("something-management") } } \seealso{ \code{\link[=use_test]{use_test()}} to create individual test files } usethis/man/github-token.Rd0000644000176200001440000000624114651000165015401 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github_token.R \name{github-token} \alias{github-token} \alias{create_github_token} \alias{gh_token_help} \title{Get help with GitHub personal access tokens} \usage{ create_github_token( scopes = c("repo", "user", "gist", "workflow"), description = "DESCRIBE THE TOKEN'S USE CASE", host = NULL ) gh_token_help(host = NULL) } \arguments{ \item{scopes}{Character vector of token scopes, pre-selected in the web form. Final choices are made in the GitHub form. Read more about GitHub API scopes at \url{https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/}.} \item{description}{Short description or nickname for the token. 
You might (eventually) have multiple tokens on your GitHub account and a label can help you keep track of what each token is for.} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \value{ Nothing } \description{ A \href{https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line}{personal access token} (PAT) is needed for certain tasks usethis does via the GitHub API, such as creating a repository, a fork, or a pull request. If you use HTTPS remotes, your PAT is also used when interacting with GitHub as a conventional Git remote. These functions help you get and manage your PAT: \itemize{ \item \code{gh_token_help()} guides you through token troubleshooting and setup. \item \code{create_github_token()} opens a browser window to the GitHub form to generate a PAT, with suggested scopes pre-selected. It also offers advice on storing your PAT. \item \code{gitcreds::gitcreds_set()} helps you register your PAT with the Git credential manager used by your operating system. Later, other packages, such as usethis, gert, and gh can automatically retrieve that PAT and use it to work with GitHub on your behalf. } Usually, the first time the PAT is retrieved in an R session, it is cached in an environment variable, for easier reuse for the duration of that R session. After initial acquisition and storage, all of this should happen automatically in the background. GitHub is encouraging the use of PATs that expire after, e.g., 30 days, so prepare yourself to re-generate and re-store your PAT periodically. Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \details{ \code{create_github_token()} has previously gone by some other names: \code{browse_github_token()} and \code{browse_github_pat()}. } \examples{ \dontrun{ create_github_token() } \dontrun{ gh_token_help() } } \seealso{ \code{\link[gh:gh_whoami]{gh::gh_whoami()}} for information on an existing token and \code{gitcreds::gitcreds_set()} and \code{gitcreds::gitcreds_get()} for a secure way to store and retrieve your PAT. } usethis/man/use_code_of_conduct.Rd0000644000176200001440000000264714651000165017000 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/code-of-conduct.R \name{use_code_of_conduct} \alias{use_code_of_conduct} \title{Add a code of conduct} \usage{ use_code_of_conduct(contact, path = NULL) } \arguments{ \item{contact}{Contact details for making a code of conduct report. Usually an email address.} \item{path}{Path of the directory to put \code{CODE_OF_CONDUCT.md} in, relative to the active project. Passed along to \code{\link[=use_directory]{use_directory()}}. Default is to locate at top-level, but \verb{.github/} is also common.} } \description{ Adds a \code{CODE_OF_CONDUCT.md} file to the active project and lists in \code{.Rbuildignore}, in the case of a package. The goal of a code of conduct is to foster an environment of inclusiveness, and to explicitly discourage inappropriate behaviour. 
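For example, a minimal sketch of a typical call supplies only a contact address, optionally placing the file under \verb{.github/} instead of the project root (the email address below is just a placeholder): \preformatted{use_code_of_conduct("jane@example.com")
use_code_of_conduct("jane@example.com", path = ".github")
}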
The template comes from \url{https://www.contributor-covenant.org}, version 2.1: \url{https://www.contributor-covenant.org/version/2/1/code_of_conduct/}. } \details{ If your package is going to CRAN, the link to the CoC in your README must be an absolute link to a rendered website as \code{CODE_OF_CONDUCT.md} is not included in the package sent to CRAN. \code{use_code_of_conduct()} will automatically generate this link if (1) you use pkgdown and (2) have set the \code{url} field in \verb{_pkgdown.yml}; otherwise it will link to a copy of the CoC on \url{https://www.contributor-covenant.org}. } usethis/man/use_vignette.Rd0000644000176200001440000000366714721145632015522 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/vignette.R \name{use_vignette} \alias{use_vignette} \alias{use_article} \title{Create a vignette or article} \usage{ use_vignette(name, title = NULL) use_article(name, title = NULL) } \arguments{ \item{name}{File name to use for new vignette. Should consist only of numbers, letters, \verb{_} and \code{-}. Lower case is recommended. Can include the \code{".Rmd"} or \code{".qmd"} file extension, which also dictates whether to place an R Markdown or Quarto vignette. R Markdown (\code{".Rmd"}) is the current default, but it is anticipated that Quarto (\code{".qmd"}) will become the default in the future.} \item{title}{The title of the vignette. If not provided, a title is generated from \code{name}.} } \description{ Creates a new vignette or article in \verb{vignettes/}. Articles are a special type of vignette that appear on pkgdown websites, but are not included in the package itself (because they are added to \code{.Rbuildignore} automatically). } \section{General setup}{ \itemize{ \item Adds needed packages to \code{DESCRIPTION}. \item Adds \code{inst/doc} to \code{.gitignore} so built vignettes aren't tracked. \item Adds \verb{vignettes/*.html} and \verb{vignettes/*.R} to \code{.gitignore} so you never accidentally track rendered vignettes. \item For \verb{*.qmd}, adds Quarto-related patterns to \code{.gitignore} and \code{.Rbuildignore}. } } \examples{ \dontrun{ use_vignette("how-to-do-stuff", "How to do stuff") use_vignette("r-markdown-is-classic.Rmd", "R Markdown is classic") use_vignette("quarto-is-cool.qmd", "Quarto is cool") } } \seealso{ \itemize{ \item The \href{https://r-pkgs.org/vignettes.html}{vignettes chapter} of \href{https://r-pkgs.org}{R Packages} \item The pkgdown vignette on Quarto: \code{vignette("quarto", package = "pkgdown")} \item The quarto (as in the R package) vignette on HTML vignettes: \code{vignette("hello", package = "quarto")} } } usethis/man/use_tidy_thanks.Rd0000644000176200001440000000427314651000165016201 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tidyverse.R \name{use_tidy_thanks} \alias{use_tidy_thanks} \title{Identify contributors via GitHub activity} \usage{ use_tidy_thanks(repo_spec = NULL, from = NULL, to = NULL) } \arguments{ \item{repo_spec}{Optional GitHub repo specification in any form accepted for the \code{repo_spec} argument of \code{\link[=create_from_github]{create_from_github()}} (plain spec or a browser or Git URL). A URL specification is the only way to target a GitHub host other than \code{"github.com"}, which is the default.} \item{from, to}{GitHub ref (i.e., a SHA, tag, or release) or a timestamp in ISO 8601 format, specifying the start or end of the interval of interest, in the sense of \verb{[from, to]}. 
Examples: "08a560d", "v1.3.0", "2018-02-24T00:13:45Z", "2018-05-01". When \verb{from = NULL, to = NULL}, we set \code{from} to the timestamp of the most recent (GitHub) release. Otherwise, \code{NULL} means "no bound".} } \value{ A character vector of GitHub usernames, invisibly. } \description{ Derives a list of GitHub usernames, based on who has opened issues or pull requests. Used to populate the acknowledgment section of package release blog posts at \url{https://www.tidyverse.org/blog/}. If no arguments are given, we retrieve all contributors to the active project since its last (GitHub) release. Unexported helper functions, \code{releases()} and \code{ref_df()} can be useful interactively to get a quick look at release tag names and a data frame about refs (defaulting to releases), respectively. } \examples{ \dontrun{ # active project, interval = since the last release use_tidy_thanks() # active project, interval = since a specific datetime use_tidy_thanks(from = "2020-07-24T00:13:45Z") # r-lib/usethis, interval = since a certain date use_tidy_thanks("r-lib/usethis", from = "2020-08-01") # r-lib/usethis, up to a specific release use_tidy_thanks("r-lib/usethis", from = NULL, to = "v1.1.0") # r-lib/usethis, since a specific commit, up to a specific date use_tidy_thanks("r-lib/usethis", from = "08a560d", to = "2018-05-14") # r-lib/usethis, but with copy/paste of a browser URL use_tidy_thanks("https://github.com/r-lib/usethis") } } usethis/man/use_github_release.Rd0000644000176200001440000000154114717524721016651 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/release.R \name{use_github_release} \alias{use_github_release} \title{Publish a GitHub release} \usage{ use_github_release(publish = TRUE) } \arguments{ \item{publish}{If \code{TRUE}, publishes a release. If \code{FALSE}, creates a draft release.} } \description{ Pushes the current branch (if safe) then publishes a GitHub release for the latest CRAN submission. If you use \code{\link[devtools:submit_cran]{devtools::submit_cran()}} to submit to CRAN, information about the submitted state is captured in a \code{CRAN-SUBMISSION} file. \code{use_github_release()} uses this info to populate the GitHub release notes and, after success, deletes the file. In the absence of such a file, we assume that current state (SHA of \code{HEAD}, package version, NEWS) is the submitted state. } usethis/man/use_package_doc.Rd0000644000176200001440000000221014651000165016065 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/documentation.R \name{use_package_doc} \alias{use_package_doc} \title{Package-level documentation} \usage{ use_package_doc(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Adds a dummy \code{.R} file that will cause roxygen2 to generate basic package-level documentation. If your package is named "foo", this will make help available to the user via \code{?foo} or \code{package?foo}. Once you call \code{devtools::document()}, roxygen2 will flesh out the \code{.Rd} file using data from the \code{DESCRIPTION}. That ensures you don't need to repeat (and remember to update!) the same information in multiple places. This \code{.R} file is also a good place for roxygen directives that apply to the whole package (vs. 
a specific function), such as global namespace tags like \verb{@importFrom}. } \seealso{ The \href{https://r-pkgs.org/man.html}{documentation chapter} of \href{https://r-pkgs.org}{R Packages} } usethis/man/use_github_action.Rd0000644000176200001440000000702414651000165016474 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-actions.R \name{use_github_action} \alias{use_github_action} \title{Set up a GitHub Actions workflow} \usage{ use_github_action( name = NULL, ref = NULL, url = NULL, save_as = NULL, readme = NULL, ignore = TRUE, open = FALSE, badge = NULL ) } \arguments{ \item{name}{For \code{use_github_action()}: Name of one of the example workflow from \url{https://github.com/r-lib/actions/tree/v2/examples} (with or without extension), e.g. \code{"pkgdown"}, \code{"check-standard.yaml"}. If the \code{name} starts with \verb{check-}, \code{save_as} will default to \code{R-CMD-check.yaml} and \code{badge} default to \code{TRUE}.} \item{ref}{Desired Git reference, usually the name of a tag (\code{"v2"}) or branch (\code{"main"}). Other possibilities include a commit SHA (\code{"d1c516d"}) or \code{"HEAD"} (meaning "tip of remote's default branch"). If not specified, defaults to the latest published release of \code{r-lib/actions} (\url{https://github.com/r-lib/actions/releases}).} \item{url}{The full URL to a \code{.yaml} file on GitHub. See more details in \code{\link[=use_github_file]{use_github_file()}}.} \item{save_as}{Name of the local workflow file. Defaults to \code{name} or \code{fs::path_file(url)} for \code{use_github_action()}. Do not specify any other part of the path; the parent directory will always be \code{.github/workflows}, within the active project.} \item{readme}{The full URL to a \code{README} file that provides more details about the workflow. Ignored when \code{url} is \code{NULL}.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} \item{badge}{Should we add a badge to the \code{README}?} } \description{ Sets up continuous integration (CI) for an R package that is developed on GitHub using \href{https://github.com/features/actions}{GitHub Actions}. CI can be used to trigger various operations for each push or pull request, e.g. running \verb{R CMD check} or building and deploying a pkgdown site. \subsection{Workflows}{ There are four particularly important workflows that are used by many packages: \itemize{ \item \code{check-standard}: Run \verb{R CMD check} using R-latest on Linux, Mac, and Windows, and using R-devel and R-oldrel on Linux. This is a good baseline if you plan on submitting your package to CRAN. \item \code{test-coverage}: Compute test coverage and report to \url{https://about.codecov.io} by calling \code{\link[covr:codecov]{covr::codecov()}}. \item \code{pkgdown}: Automatically build and publish a pkgdown website. But we recommend instead calling \code{\link[=use_pkgdown_github_pages]{use_pkgdown_github_pages()}} which performs other important set up. \item \code{pr-commands}: Enables the use of two R-specific commands in pull request issue comments: \verb{/document} to run \code{roxygen2::roxygenise()} and \verb{/style} to run \code{styler::style_pkg()}. Both will update the PR with any changes once they're done. 
} If you call \code{use_github_action()} without arguments, you'll be prompted to pick from one of these. Otherwise you can see a complete list of possibilities provided by r-lib at \url{https://github.com/r-lib/actions/tree/v2/examples}, or you can supply your own \code{url} to use any other workflow. } } \examples{ \dontrun{ use_github_action() use_github_action_check_standard() use_github_action("pkgdown") } } usethis/man/use_github_file.Rd0000644000176200001440000000522414651000165016136 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/use_github_file.R \name{use_github_file} \alias{use_github_file} \title{Copy a file from any GitHub repo into the current project} \usage{ use_github_file( repo_spec, path = NULL, save_as = NULL, ref = NULL, ignore = FALSE, open = FALSE, overwrite = FALSE, host = NULL ) } \arguments{ \item{repo_spec}{A string identifying the GitHub repo or, alternatively, a GitHub file URL. Acceptable forms: \itemize{ \item Plain \code{OWNER/REPO} spec \item A blob URL, such as \code{"https://github.com/OWNER/REPO/blob/REF/path/to/some/file"} \item A raw URL, such as \code{"https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"} } In the case of a URL, the \code{path}, \code{ref}, and \code{host} are extracted from it, in addition to the \code{repo_spec}.} \item{path}{Path of file to copy, relative to the GitHub repo it lives in. This is extracted from \code{repo_spec} when user provides a URL.} \item{save_as}{Path of file to create, relative to root of active project. Defaults to the last part of \code{path}, in the sense of \code{basename(path)} or \code{fs::path_file(path)}.} \item{ref}{The name of a branch, tag, or commit. By default, the file at \code{path} will be copied from its current state in the repo's default branch. This is extracted from \code{repo_spec} when user provides a URL.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} \item{overwrite}{Force overwrite of existing file?} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \value{ A logical indicator of whether a file was written, invisibly. } \description{ Gets the content of a file from GitHub, from any repo the user can read, and writes it into the active project. This function wraps an endpoint of the GitHub API which supports specifying a target reference (i.e. branch, tag, or commit) and which follows symlinks. 
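Because the write indicator is returned invisibly, you can capture it when your own setup code needs to know whether anything changed; a minimal sketch, reusing the \code{r-lib/actions} example shown in the examples below: \preformatted{copied <- use_github_file(
  "r-lib/actions",
  path = "examples/check-standard.yaml",
  ref = "v2",
  save_as = ".github/workflows/R-CMD-check.yaml"
)
if (!copied) message("Nothing was written")
}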
} \examples{ \dontrun{ use_github_file( "https://github.com/r-lib/actions/blob/v2/examples/check-standard.yaml" ) use_github_file( "r-lib/actions", path = "examples/check-standard.yaml", ref = "v2", save_as = ".github/workflows/R-CMD-check.yaml" ) } } usethis/man/use_tutorial.Rd0000644000176200001440000000251414651000165015517 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tutorial.R \name{use_tutorial} \alias{use_tutorial} \title{Create a learnr tutorial} \usage{ use_tutorial(name, title, open = rlang::is_interactive()) } \arguments{ \item{name}{Base for file name to use for new \code{.Rmd} tutorial. Should consist only of numbers, letters, \verb{_} and \code{-}. We recommend using lower case.} \item{title}{The human-facing title of the tutorial.} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Creates a new tutorial below \verb{inst/tutorials/}. Tutorials are interactive R Markdown documents built with the \href{https://rstudio.github.io/learnr/index.html}{\code{learnr} package}. \code{use_tutorial()} does this setup: \itemize{ \item Adds learnr to Suggests in \code{DESCRIPTION}. \item Gitignores \verb{inst/tutorials/*.html} so you don't accidentally track rendered tutorials. \item Creates a new \code{.Rmd} tutorial from a template and, optionally, opens it for editing. \item Adds new \code{.Rmd} to \code{.Rbuildignore}. } } \examples{ \dontrun{ use_tutorial("learn-to-do-stuff", "Learn to do stuff") } } \seealso{ The \href{https://rstudio.github.io/learnr/index.html}{learnr package documentation}. } usethis/man/use_directory.Rd0000644000176200001440000000127114651000165015657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/directory.R \name{use_directory} \alias{use_directory} \title{Use a directory} \usage{ use_directory(path, ignore = FALSE) } \arguments{ \item{path}{Path of the directory to create, relative to the project.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} } \description{ \code{use_directory()} creates a directory (if it does not already exist) in the project's top-level directory. This function powers many of the other \code{use_} functions such as \code{\link[=use_data]{use_data()}} and \code{\link[=use_vignette]{use_vignette()}}. } \examples{ \dontrun{ use_directory("inst") } } usethis/man/use_addin.Rd0000644000176200001440000000122214651000165014726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/addin.R \name{use_addin} \alias{use_addin} \title{Add minimal RStudio Addin binding} \usage{ use_addin(addin = "new_addin", open = rlang::is_interactive()) } \arguments{ \item{addin}{Name of the addin function, which should be defined in the \code{R} folder.} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ This function helps you add a minimal \href{https://rstudio.github.io/rstudioaddins/}{RStudio Addin} binding to \code{inst/rstudio/addins.dcf}. 
} usethis/man/rename_files.Rd0000644000176200001440000000161214717524721015443 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rename-files.R \name{rename_files} \alias{rename_files} \title{Automatically rename paired \verb{R/} and \verb{test/} files} \usage{ rename_files(old, new) } \arguments{ \item{old, new}{Old and new file names (with or without \code{.R} extensions).} } \description{ \itemize{ \item Moves \verb{R/\{old\}.R} to \verb{R/\{new\}.R} \item Moves \verb{src/\{old\}.*} to \verb{src/\{new\}.*} \item Moves \verb{tests/testthat/test-\{old\}.R} to \verb{tests/testthat/test-\{new\}.R} \item Moves \verb{tests/testthat/test-\{old\}-*.*} to \verb{tests/testthat/test-\{new\}-*.*} and updates paths in the test file. \item Removes \code{context()} calls from the test file, which are unnecessary (and discouraged) as of testthat v2.1.0. } This is a potentially dangerous operation, so you must be using Git in order to use this function. } usethis/man/use_github_actions_badge.Rd0000644000176200001440000000106214651000165017775 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-actions.R \name{use_github_actions_badge} \alias{use_github_actions_badge} \title{Generates a GitHub Actions badge} \usage{ use_github_actions_badge(name = "R-CMD-check.yaml", repo_spec = NULL) } \arguments{ \item{name}{Name of the workflow's YAML configuration file (with or without extension), e.g. \code{"R-CMD-check"}, \code{"R-CMD-check.yaml"}.} } \description{ Generates a GitHub Actions badge and that's all. This exists primarily for internal use. } \keyword{internal} usethis/man/figures/0000755000176200001440000000000014717524721014167 5ustar liggesusersusethis/man/figures/lifecycle-stable.svg0000644000176200001440000000247214651000165020110 0ustar liggesusers lifecycle: stable lifecycle stable usethis/man/figures/lifecycle-experimental.svg0000644000176200001440000000245014651000165021327 0ustar liggesusers lifecycle: experimental lifecycle experimental usethis/man/figures/lifecycle-deprecated.svg0000644000176200001440000000244014651000165020731 0ustar liggesusers lifecycle: deprecated lifecycle deprecated usethis/man/figures/lifecycle-superseded.svg0000644000176200001440000000244014651000165020774 0ustar liggesusers lifecycle: superseded lifecycle superseded usethis/man/figures/logo.png0000644000176200001440000007054514651000165015634 0ustar liggesusers [binary PNG image data for the package logo omitted]
usethis/man/issue-this.Rd0000644000176200001440000000254314717524721015113 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/issue.R \name{issue-this} \alias{issue-this} \alias{issue_close_community} \alias{issue_reprex_needed} \title{Helpers for GitHub issues} \usage{ issue_close_community(number, reprex = FALSE) issue_reprex_needed(number) } \arguments{ \item{number}{Issue number} \item{reprex}{Does the issue also need a reprex?} } \description{ The \verb{issue_*} family of functions allows you to perform common operations on GitHub issues from within R. They're designed to help you efficiently deal with large numbers of issues, particularly motivated by the challenges faced by the tidyverse team. \itemize{ \item \code{issue_close_community()} closes an issue, because it's not a bug report or feature request, and points the author towards Posit Community as a better place to discuss usage (\url{https://forum.posit.co}). \item \code{issue_reprex_needed()} labels the issue with the "reprex" label and gives the author some advice about what is needed. 
} } \section{Saved replies}{ Unlike GitHub's "saved replies", these functions can: \itemize{ \item Be shared between people \item Perform other actions, like labelling, or closing \item Have additional arguments \item Include randomness (like friendly gifs) } } \examples{ \dontrun{ issue_close_community(12, reprex = TRUE) issue_reprex_needed(241) } } usethis/man/use_rmarkdown_template.Rd0000644000176200001440000000223414651000165017552 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rmarkdown.R \name{use_rmarkdown_template} \alias{use_rmarkdown_template} \title{Add an RMarkdown Template} \usage{ use_rmarkdown_template( template_name = "Template Name", template_dir = NULL, template_description = "A description of the template", template_create_dir = FALSE ) } \arguments{ \item{template_name}{The name as printed in the template menu.} \item{template_dir}{Name of the directory the template will live in within \code{inst/rmarkdown/templates}. If none is provided by the user, it will be created from \code{template_name}.} \item{template_description}{Sets the value of \code{description} in \code{template.yml}.} \item{template_create_dir}{Sets the value of \code{create_dir} in \code{template.yml}.} } \description{ Adds files and directories necessary to add a custom rmarkdown template to RStudio. It creates: \itemize{ \item \code{inst/rmarkdown/templates/{{template_dir}}}. Main directory. \item \code{skeleton/skeleton.Rmd}. Your template Rmd file. \item \code{template.yml} with basic information filled in. } } \examples{ \dontrun{ use_rmarkdown_template() } } usethis/man/usethis_options.Rd0000644000176200001440000000554614651000165016247 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-package.R \name{usethis_options} \alias{usethis_options} \title{Options consulted by usethis} \description{ User-configurable options consulted by usethis, which provide a mechanism for setting default behaviors for various functions. If the built-in defaults don't suit you, set one or more of these options. Typically, this is done in the \code{.Rprofile} startup file, which you can open for editing with \code{\link[=edit_r_profile]{edit_r_profile()}} - this will set the specified options for all future R sessions. Your code will look something like: \if{html}{\out{
}}\preformatted{options( usethis.description = list( "Authors@R" = utils::person( "Jane", "Doe", email = "jane@example.com", role = c("aut", "cre"), comment = c(ORCID = "YOUR-ORCID-ID") ), License = "MIT + file LICENSE" ), usethis.destdir = "/path/to/folder/", # for use_course(), create_from_github() usethis.protocol = "ssh", # Use ssh git protocol usethis.overwrite = TRUE # overwrite files in Git repos without confirmation ) }\if{html}{\out{
}} } \section{Options for the usethis package}{ \itemize{ \item \code{usethis.description}: customize the default content of new \code{DESCRIPTION} files by setting this option to a named list. If you are a frequent package developer, it is worthwhile to pre-configure your preferred name, email, license, etc. See the example above and the \href{https://usethis.r-lib.org/articles/articles/usethis-setup.html}{article on usethis setup} for more details. \item \code{usethis.destdir}: Default directory in which to place new projects downloaded by \code{\link[=use_course]{use_course()}} and \code{\link[=create_from_github]{create_from_github()}}. If this option is unset, the user's Desktop or similarly conspicuous place will be used. \item \code{usethis.protocol}: specifies your preferred transport protocol for Git. Either "https" (default) or "ssh": \itemize{ \item \code{usethis.protocol = "https"} implies \verb{https://github.com/OWNER/REPO.git} \item \code{usethis.protocol = "ssh"} implies \verb{git@github.com:OWNER/REPO.git} } You can also change this for the duration of your R session with \code{\link[=use_git_protocol]{use_git_protocol()}}. \item \code{usethis.overwrite}: If \code{TRUE}, usethis overwrites an existing file without asking for user confirmation if the file is inside a Git repo. The rationale is that the normal Git workflow makes it easy to see and selectively accept/discard any proposed changes. \item \code{usethis.quiet}: Set to \code{TRUE} to suppress user-facing messages. Default \code{FALSE}. \item \code{usethis.allow_nested_project}: Whether or not to allow you to create a project inside another project. This is rarely a good idea, so this option defaults to \code{FALSE}. } } usethis/man/create_from_github.Rd0000644000176200001440000001265514717524721016643 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/create.R \name{create_from_github} \alias{create_from_github} \title{Create a project from a GitHub repo} \usage{ create_from_github( repo_spec, destdir = NULL, fork = NA, rstudio = NULL, open = rlang::is_interactive(), protocol = git_protocol(), host = NULL ) } \arguments{ \item{repo_spec}{A string identifying the GitHub repo in one of these forms: \itemize{ \item Plain \code{OWNER/REPO} spec \item Browser URL, such as \code{"https://github.com/OWNER/REPO"} \item HTTPS Git URL, such as \code{"https://github.com/OWNER/REPO.git"} \item SSH Git URL, such as \code{"git@github.com:OWNER/REPO.git"} }} \item{destdir}{Destination for the new folder, which will be named according to the \code{REPO} extracted from \code{repo_spec}. Defaults to the location stored in the global option \code{usethis.destdir}, if defined, or to the user's Desktop or similarly conspicuous place otherwise.} \item{fork}{If \code{FALSE}, we clone \code{repo_spec}. If \code{TRUE}, we fork \code{repo_spec}, clone that fork, and do additional setup favorable for future pull requests: \itemize{ \item The source repo, \code{repo_spec}, is configured as the \code{upstream} remote, using the indicated \code{protocol}. \item The local \code{DEFAULT} branch is set to track \code{upstream/DEFAULT}, where \code{DEFAULT} is typically \code{main} or \code{master}. It is also immediately pulled, to cover the case of a pre-existing, out-of-date fork. } If \code{fork = NA} (the default), we check your permissions on \code{repo_spec}.
If you can push, we set \code{fork = FALSE}, If you cannot, we set \code{fork = TRUE}.} \item{rstudio}{Initiate an \href{https://r-pkgs.org/workflow101.html#sec-workflow101-rstudio-projects}{RStudio Project}? Defaults to \code{TRUE} if in an RStudio session and project has no pre-existing \code{.Rproj} file. Defaults to \code{FALSE} otherwise (but note that the cloned repo may already be an RStudio Project, i.e. may already have a \code{.Rproj} file).} \item{open}{If \code{TRUE}, \link[=proj_activate]{activates} the new project: \itemize{ \item If using RStudio desktop, the package is opened in a new session. \item If on RStudio server, the current RStudio project is activated. \item Otherwise, the working directory and active project is changed. }} \item{protocol}{One of "https" or "ssh"} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If \code{repo_spec} is a URL, \code{host} is extracted from that. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \description{ Creates a new local project and Git repository from a repo on GitHub, by either cloning or \href{https://docs.github.com/en/get-started/quickstart/fork-a-repo}{fork-and-cloning}. In the fork-and-clone case, \code{create_from_github()} also does additional remote and branch setup, leaving you in the perfect position to make a pull request with \code{\link[=pr_init]{pr_init()}}, one of several \link[=pull-requests]{functions for working with pull requests}. \code{create_from_github()} works best when your GitHub credentials are discoverable. See below for more about authentication. } \section{Git/GitHub Authentication}{ Many usethis functions, including those documented here, potentially interact with GitHub in two different ways: \itemize{ \item Via the GitHub REST API. Examples: create a repo, a fork, or a pull request. \item As a conventional Git remote. Examples: clone, fetch, or push. } Therefore two types of auth can happen and your credentials must be discoverable. Which credentials do we mean? \itemize{ \item A GitHub personal access token (PAT) must be discoverable by the gh package, which is used for GitHub operations via the REST API. See \code{\link[=gh_token_help]{gh_token_help()}} for more about getting and configuring a PAT. \item If you use the HTTPS protocol for Git remotes, your PAT is also used for Git operations, such as \verb{git push}. Usethis uses the gert package for this, so the PAT must be discoverable by gert. Generally gert and gh will discover and use the same PAT. This ability to "kill two birds with one stone" is why HTTPS + PAT is our recommended auth strategy for those new to Git and GitHub and PRs. \item If you use SSH remotes, your SSH keys must also be discoverable, in addition to your PAT. The public key must be added to your GitHub account. } Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \examples{ \dontrun{ create_from_github("r-lib/usethis") # repo_spec can be a URL create_from_github("https://github.com/r-lib/usethis") # a URL repo_spec also specifies the host (e.g. 
GitHub Enterprise instance) create_from_github("https://github.acme.com/OWNER/REPO") } } \seealso{ \itemize{ \item \code{\link[=use_github]{use_github()}} to go the opposite direction, i.e. create a GitHub repo from your local repo \item \code{\link[=git_protocol]{git_protocol()}} for background on \code{protocol} (HTTPS vs SSH) \item \code{\link[=use_course]{use_course()}} to download a snapshot of all files in a GitHub repo, without the need for any local or remote Git operations } } usethis/man/use_pkgdown.Rd0000644000176200001440000000316714651000165015332 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pkgdown.R \name{use_pkgdown} \alias{use_pkgdown} \alias{use_pkgdown_github_pages} \title{Use pkgdown} \usage{ use_pkgdown(config_file = "_pkgdown.yml", destdir = "docs") use_pkgdown_github_pages() } \arguments{ \item{config_file}{Path to the pkgdown yaml config file, relative to the project.} \item{destdir}{Target directory for pkgdown docs.} } \description{ \href{https://pkgdown.r-lib.org}{pkgdown} makes it easy to turn your package into a beautiful website. usethis provides two functions to help you use pkgdown: \itemize{ \item \code{use_pkgdown()}: creates a pkgdown config file and adds relevant files or directories to \code{.Rbuildignore} and \code{.gitignore}. \item \code{use_pkgdown_github_pages()}: implements the GitHub setup needed to automatically publish your pkgdown site to GitHub pages: \itemize{ \item (first, it calls \code{use_pkgdown()}) \item \code{\link[=use_github_pages]{use_github_pages()}} prepares to publish the pkgdown site from the \code{gh-pages} branch \item \code{\link[=use_github_action]{use_github_action("pkgdown")}} configures a GitHub Action to automatically build the pkgdown site and deploy it via GitHub Pages \item The pkgdown site's URL is added to the pkgdown configuration file, to the URL field of DESCRIPTION, and to the GitHub repo. \item Packages owned by certain GitHub organizations (tidyverse, r-lib, and tidymodels) get some special treatment, in terms of anticipating the (eventual) site URL and the use of a pkgdown template. } } } \seealso{ \url{https://pkgdown.r-lib.org/articles/pkgdown.html#configuration} } usethis/man/badges.Rd0000644000176200001440000000563514717524721014250 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/badge.R \name{badges} \alias{badges} \alias{use_badge} \alias{use_cran_badge} \alias{use_bioc_badge} \alias{use_lifecycle_badge} \alias{use_binder_badge} \alias{use_posit_cloud_badge} \title{README badges} \usage{ use_badge(badge_name, href, src) use_cran_badge() use_bioc_badge() use_lifecycle_badge(stage) use_binder_badge(ref = git_default_branch(), urlpath = NULL) use_posit_cloud_badge(url) } \arguments{ \item{badge_name}{Badge name. Used in error message and alt text} \item{href, src}{Badge link and image src} \item{stage}{Stage of the package lifecycle. One of "experimental", "stable", "superseded", or "deprecated".} \item{ref}{A Git branch, tag, or SHA} \item{urlpath}{An optional \code{urlpath} component to add to the link, e.g. \code{"rstudio"} to open an RStudio IDE instead of a Jupyter notebook. See the \href{https://mybinder.readthedocs.io/en/latest/howto/user_interface.html}{binder documentation} for additional examples.} \item{url}{A link to an existing \href{https://posit.cloud}{Posit Cloud} project. 
See the \href{https://posit.cloud/learn/guide#project-settings-access}{Posit Cloud documentation} for details on how to set project access and obtain a project link.} } \description{ These helpers produce the markdown text you need in your README to include badges that report information, such as the CRAN version or test coverage, and link out to relevant external resources. To add badges automatically ensure your badge block starts with a line containing only \verb{<!-- badges: start -->} and ends with a line containing only \verb{<!-- badges: end -->}. } \details{ \itemize{ \item \code{use_badge()}: a general helper used in all badge functions \item \code{use_bioc_badge()}: badge indicates \href{https://bioconductor.org/developers/}{BioConductor build status} \item \code{use_cran_badge()}: badge indicates what version of your package is available on CRAN, powered by \url{https://www.r-pkg.org} \item \code{use_lifecycle_badge()}: badge declares the developmental stage of a package according to \url{https://lifecycle.r-lib.org/articles/stages.html}. \item \code{use_binder_badge()}: badge indicates that your repository can be launched in an executable environment on \url{https://mybinder.org/} \item \code{use_posit_cloud_badge()}: badge indicates that your repository can be launched in a \href{https://posit.cloud}{Posit Cloud} project \item \code{use_rscloud_badge()}: \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: Use \code{\link[=use_posit_cloud_badge]{use_posit_cloud_badge()}} instead. } } \examples{ \dontrun{ use_cran_badge() use_lifecycle_badge("stable") } } \seealso{ Functions that configure continuous integration, such as \code{\link[=use_github_actions]{use_github_actions()}}, also create badges. } usethis/man/use_namespace.Rd0000644000176200001440000000140214651000165015603 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/namespace.R \name{use_namespace} \alias{use_namespace} \title{Use a basic \code{NAMESPACE}} \usage{ use_namespace(roxygen = TRUE) } \arguments{ \item{roxygen}{Do you plan to manage \code{NAMESPACE} with roxygen2?} } \description{ If \code{roxygen} is \code{TRUE} generates an empty \code{NAMESPACE} that exports nothing; you'll need to explicitly export functions with \verb{@export}. If \code{roxygen} is \code{FALSE}, generates a default \code{NAMESPACE} that exports all functions except those that start with \code{.}. } \seealso{ The \href{https://r-pkgs.org/dependencies-mindset-background.html#sec-dependencies-namespace}{namespace chapter} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/rprofile-helper.Rd0000644000176200001440000000157614651000165016106 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rprofile.R \name{rprofile-helper} \alias{rprofile-helper} \alias{use_conflicted} \alias{use_reprex} \alias{use_usethis} \alias{use_devtools} \alias{use_partial_warnings} \title{Helpers to make useful changes to \code{.Rprofile}} \usage{ use_conflicted() use_reprex() use_usethis() use_devtools() use_partial_warnings() } \description{ All functions open your \code{.Rprofile} and give you the code you need to paste in. \itemize{ \item \code{use_devtools()}: makes devtools available in interactive sessions. \item \code{use_usethis()}: makes usethis available in interactive sessions. \item \code{use_reprex()}: makes reprex available in interactive sessions.
\item \code{use_conflicted()}: makes conflicted available in interactive sessions. \item \code{use_partial_warnings()}: warns on partial matches. } } usethis/man/use_build_ignore.Rd0000644000176200001440000000201714651000165016314 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ignore.R \name{use_build_ignore} \alias{use_build_ignore} \title{Add files to \code{.Rbuildignore}} \usage{ use_build_ignore(files, escape = TRUE) } \arguments{ \item{files}{Character vector of path names.} \item{escape}{If \code{TRUE}, the default, will escape \code{.} to \verb{\\\\.} and surround with \code{^} and \code{$}.} } \description{ \code{.Rbuildignore} has a regular expression on each line, but it's usually easier to work with specific file names. By default, \code{use_build_ignore()} will (crudely) turn a filename into a regular expression that will only match that path. Repeated entries will be silently removed. \code{use_build_ignore()} is designed to ignore \emph{individual} files. If you want to ignore \emph{all} files with a given extension, consider providing an "as-is" regular expression, using \code{escape = FALSE}; see examples. } \examples{ \dontrun{ # ignore all Excel files use_build_ignore("[.]xlsx$", escape = FALSE) } } usethis/man/use_github_pages.Rd0000644000176200001440000000477014651000165016323 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-pages.R \name{use_github_pages} \alias{use_github_pages} \title{Configure a GitHub Pages site} \usage{ use_github_pages(branch = "gh-pages", path = "/", cname = NA) } \arguments{ \item{branch, path}{Branch and path for the site source. The default of \code{branch = "gh-pages"} and \code{path = "/"} reflects strong GitHub support for this configuration: when a \code{gh-pages} branch is first created, it is \emph{automatically} published to Pages, using the source found in \code{"/"}. If a \code{gh-pages} branch does not yet exist on the host, \code{use_github_pages()} creates an empty, orphan remote branch. The most common alternative is to use the repo's default branch, coupled with \code{path = "/docs"}. It is the user's responsibility to ensure that this \code{branch} pre-exists on the host. Note that GitHub does not support an arbitrary \code{path} and, at the time of writing, only \code{"/"} or \code{"/docs"} are accepted.} \item{cname}{Optional, custom domain name. The \code{NA} default means "don't set or change this", whereas a value of \code{NULL} removes any previously configured custom domain. Note that this \emph{can} add or modify a CNAME file in your repository. If you are using Pages to host a pkgdown site, it is better to specify its URL in the pkgdown config file and let pkgdown manage CNAME.} } \value{ Site metadata returned by the GitHub API, invisibly } \description{ Activates or reconfigures a GitHub Pages site for a project hosted on GitHub. This function anticipates two specific usage modes: \itemize{ \item Publish from the root directory of a \code{gh-pages} branch, which is assumed to be only (or at least primarily) a remote branch. Typically the \code{gh-pages} branch is managed by an automatic "build and deploy" job, such as the one configured by \code{\link[=use_github_action]{use_github_action("pkgdown")}}. \item Publish from the \code{"/docs"} directory of a "regular" branch, probably the repo's default branch. The user is assumed to have a plan for how they will manage the content below \code{"/docs"}. 
} } \examples{ \dontrun{ use_github_pages() use_github_pages(branch = git_default_branch(), path = "/docs") } } \seealso{ \itemize{ \item \code{\link[=use_pkgdown_github_pages]{use_pkgdown_github_pages()}} combines \code{use_github_pages()} with other functions to fully configure a pkgdown site \item \url{https://docs.github.com/en/pages} \item \url{https://docs.github.com/en/rest/pages} } } usethis/man/use_package.Rd0000644000176200001440000000442314717524721015264 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/package.R \name{use_package} \alias{use_package} \alias{use_dev_package} \title{Depend on another package} \usage{ use_package(package, type = "Imports", min_version = NULL) use_dev_package(package, type = "Imports", remote = NULL) } \arguments{ \item{package}{Name of package to depend on.} \item{type}{Type of dependency: must be one of "Imports", "Depends", "Suggests", "Enhances", or "LinkingTo" (or unique abbreviation). Matching is case insensitive.} \item{min_version}{Optionally, supply a minimum version for the package. Set to \code{TRUE} to use the currently installed version or use a version string suitable for \code{\link[=numeric_version]{numeric_version()}}, such as "2.5.0".} \item{remote}{By default, an \code{OWNER/REPO} GitHub remote is inserted. Optionally, you can supply a character string to specify the remote, e.g. \code{"gitlab::jimhester/covr"}, using any syntax supported by the \href{https://remotes.r-lib.org/articles/dependencies.html#other-sources}{remotes package}.} } \description{ \code{use_package()} adds a CRAN package dependency to \code{DESCRIPTION} and offers a little advice about how to best use it. \code{use_dev_package()} adds a dependency on an in-development package, adding the dev repo to \code{Remotes} so it will be automatically installed from the correct location. There is no helper to remove a dependency: to do that, simply remove that package from your \code{DESCRIPTION} file. \code{use_package()} exists to support a couple of common maneuvers: \itemize{ \item Add a dependency to \code{Imports} or \code{Suggests} or \code{LinkingTo}. \item Add a minimum version to a dependency. \item Specify the minimum supported version for R. } \code{use_package()} probably works for slightly more exotic modifications, but at some point, you should edit \code{DESCRIPTION} yourself by hand. There is no intention to account for all possible edge cases. } \examples{ \dontrun{ use_package("ggplot2") use_package("dplyr", "suggests") use_dev_package("glue") # Depend on R version 4.1 use_package("R", type = "Depends", min_version = "4.1") } } \seealso{ The \href{https://r-pkgs.org/dependencies-mindset-background.html}{dependencies section} of \href{https://r-pkgs.org}{R Packages}. 
} usethis/man/pull-requests.Rd0000644000176200001440000002360314651000165015627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pr.R \name{pull-requests} \alias{pull-requests} \alias{pr_init} \alias{pr_resume} \alias{pr_fetch} \alias{pr_push} \alias{pr_pull} \alias{pr_merge_main} \alias{pr_view} \alias{pr_pause} \alias{pr_finish} \alias{pr_forget} \title{Helpers for GitHub pull requests} \usage{ pr_init(branch) pr_resume(branch = NULL) pr_fetch(number = NULL, target = c("source", "primary")) pr_push() pr_pull() pr_merge_main() pr_view(number = NULL, target = c("source", "primary")) pr_pause() pr_finish(number = NULL, target = c("source", "primary")) pr_forget() } \arguments{ \item{branch}{Name of a new or existing local branch. If creating a new branch, note this should usually consist of lower case letters, numbers, and \code{-}.} \item{number}{Number of PR.} \item{target}{Which repo to target? This is only a question in the case of a fork. In a fork, there is some slim chance that you want to consider pull requests against your fork (the primary repo, i.e. \code{origin}) instead of those against the source repo (i.e. \code{upstream}, which is the default).} } \description{ The \verb{pr_*} family of functions is designed to make working with GitHub pull requests (PRs) as painless as possible for both contributors and package maintainers. To use the \verb{pr_*} functions, your project must be a Git repo and have one of these GitHub remote configurations: \itemize{ \item "ours": You can push to the GitHub remote configured as \code{origin} and it's not a fork. \item "fork": You can push to the GitHub remote configured as \code{origin}, it's a fork, and its parent is configured as \code{upstream}. \code{origin} points to your \strong{personal} copy and \code{upstream} points to the \strong{source repo}. } "Ours" and "fork" are two of several GitHub remote configurations examined in \href{https://happygitwithr.com/common-remote-setups.html}{Common remote setups} in Happy Git and GitHub for the useR. The \href{https://usethis.r-lib.org/articles/articles/pr-functions.html}{Pull Request Helpers} article walks through the process of making a pull request with the \verb{pr_*} functions. The \verb{pr_*} functions also use your Git/GitHub credentials to carry out various remote operations; see below for more about auth. The \verb{pr_*} functions also proactively check for agreement re: the default branch in your local repo and the source repo. See \code{\link[=git_default_branch]{git_default_branch()}} for more. } \section{Git/GitHub Authentication}{ Many usethis functions, including those documented here, potentially interact with GitHub in two different ways: \itemize{ \item Via the GitHub REST API. Examples: create a repo, a fork, or a pull request. \item As a conventional Git remote. Examples: clone, fetch, or push. } Therefore two types of auth can happen and your credentials must be discoverable. Which credentials do we mean? \itemize{ \item A GitHub personal access token (PAT) must be discoverable by the gh package, which is used for GitHub operations via the REST API. See \code{\link[=gh_token_help]{gh_token_help()}} for more about getting and configuring a PAT. \item If you use the HTTPS protocol for Git remotes, your PAT is also used for Git operations, such as \verb{git push}. Usethis uses the gert package for this, so the PAT must be discoverable by gert. Generally gert and gh will discover and use the same PAT. 
This ability to "kill two birds with one stone" is why HTTPS + PAT is our recommended auth strategy for those new to Git and GitHub and PRs. \item If you use SSH remotes, your SSH keys must also be discoverable, in addition to your PAT. The public key must be added to your GitHub account. } Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \section{For contributors}{ To contribute to a package, first use \code{create_from_github("OWNER/REPO")}. This forks the source repository and checks out a local copy. Next use \code{pr_init()} to create a branch for your PR. It is best practice to never make commits to the default branch branch of a fork (usually named \code{main} or \code{master}), because you do not own it. A pull request should always come from a feature branch. It will be much easier to pull upstream changes from the fork parent if you only allow yourself to work in feature branches. It is also much easier for a maintainer to explore and extend your PR if you create a feature branch. Work locally, in your branch, making changes to files, and committing your work. Once you're ready to create the PR, run \code{pr_push()} to push your local branch to GitHub, and open a webpage that lets you initiate the PR (or draft PR). To learn more about the process of making a pull request, read the \href{https://usethis.r-lib.org/articles/articles/pr-functions.html}{Pull Request Helpers} vignette. If you are lucky, your PR will be perfect, and the maintainer will accept it. You can then run \code{pr_finish()} to delete your PR branch. In most cases, however, the maintainer will ask you to make some changes. Make the changes, then run \code{pr_push()} to update your PR. It's also possible that the maintainer will contribute some code to your PR: to get those changes back onto your computer, run \code{pr_pull()}. It can also happen that other changes have occurred in the package since you first created your PR. You might need to merge the default branch (usually named \code{main} or \code{master}) into your PR branch. Do that by running \code{pr_merge_main()}: this makes sure that your PR is compatible with the primary repo's main line of development. Both \code{pr_pull()} and \code{pr_merge_main()} can result in merge conflicts, so be prepared to resolve before continuing. } \section{For maintainers}{ To download a PR locally so that you can experiment with it, run \code{pr_fetch()} and select the PR or, if you already know its number, call \verb{pr_fetch()}. If you make changes, run \code{pr_push()} to push them back to GitHub. After you have merged the PR, run \code{pr_finish()} to delete the local branch and remove the remote associated with the contributor's fork. } \section{Overview of all the functions}{ \itemize{ \item \code{pr_init()}: As a contributor, start work on a new PR by ensuring that your local repo is up-to-date, then creating and checking out a new branch. Nothing is pushed to or created on GitHub until you call \code{pr_push()}. \item \code{pr_fetch()}: As a maintainer, review or contribute changes to an existing PR by creating a local branch that tracks the remote PR. \code{pr_fetch()} does as little work as possible, so you can also use it to resume work on an PR that already has a local branch (where it will also ensure your local branch is up-to-date). If called with no arguments, up to 9 open PRs are offered for interactive selection. 
\item \code{pr_resume()}: Resume work on a PR by switching to an existing local branch and pulling any changes from its upstream tracking branch, if it has one. If called with no arguments, up to 9 local branches are offered for interactive selection, with a preference for branches connected to PRs and for branches with recent activity. \item \code{pr_push()}: The first time it's called, a PR branch is pushed to GitHub and you're taken to a webpage where a new PR (or draft PR) can be created. This also sets up the local branch to track its remote counterpart. Subsequent calls to \code{pr_push()} make sure the local branch has all the remote changes and, if so, pushes local changes, thereby updating the PR. \item \code{pr_pull()}: Pulls changes from the local branch's remote tracking branch. If a maintainer has extended your PR, this is how you bring those changes back into your local work. \item \code{pr_merge_main()}: Pulls changes from the default branch of the source repo into the current local branch. This can be used when the local branch is the default branch or when it's a PR branch. \item \code{pr_pause()}: Makes sure you're up-to-date with any remote changes in the PR. Then switches back to the default branch and pulls from the source repo. Use \code{pr_resume()} with name of branch or use \code{pr_fetch()} to resume using PR number. \item \code{pr_view()}: Visits the PR associated with the current branch in the browser (default) or the specific PR identified by \code{number}. (FYI \code{\link[=browse_github_pulls]{browse_github_pulls()}} is a handy way to visit the list of all PRs for the current project.) \item \code{pr_forget()}: Does local clean up when the current branch is an actual or notional PR that you want to abandon. Maybe you initiated it yourself, via \code{pr_init()}, or you used \code{pr_fetch()} to explore a PR from GitHub. Only does \emph{local} operations: does not update or delete any remote branches, nor does it close any PRs. Alerts the user to any uncommitted or unpushed work that is at risk of being lost. If user chooses to proceed, switches back to the default branch, pulls changes from source repo, and deletes local PR branch. Any associated Git remote is deleted, if the "forgotten" PR was the only branch using it. \item \code{pr_finish()}: Does post-PR clean up, but does NOT actually merge or close a PR (maintainer should do this in the browser). If \code{number} is not given, infers the PR from the upstream tracking branch of the current branch. If \code{number} is given, it does not matter whether the PR exists locally. If PR exists locally, alerts the user to uncommitted or unpushed changes, then switches back to the default branch, pulls changes from source repo, and deletes local PR branch. If the PR came from an external fork, any associated Git remote is deleted, provided it's not in use by any other local branches. If the PR has been merged and user has permission, deletes the remote branch (this is the only remote operation that \code{pr_finish()} potentially does). 
} } \examples{ \dontrun{ pr_fetch(123) } } usethis/man/tidyverse.Rd0000644000176200001440000001363514717524762015045 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-actions.R, R/tidyverse.R, R/upkeep.R \name{use_tidy_github_actions} \alias{use_tidy_github_actions} \alias{tidyverse} \alias{create_tidy_package} \alias{use_tidy_description} \alias{use_tidy_dependencies} \alias{use_tidy_contributing} \alias{use_tidy_support} \alias{use_tidy_issue_template} \alias{use_tidy_coc} \alias{use_tidy_github} \alias{use_tidy_style} \alias{use_tidy_logo} \alias{use_tidy_upkeep_issue} \title{Helpers for tidyverse development} \usage{ use_tidy_github_actions(ref = NULL) create_tidy_package(path, copyright_holder = NULL) use_tidy_description() use_tidy_dependencies() use_tidy_contributing() use_tidy_support() use_tidy_issue_template() use_tidy_coc() use_tidy_github() use_tidy_style(strict = TRUE) use_tidy_logo(geometry = "240x278", retina = TRUE) use_tidy_upkeep_issue(last_upkeep = last_upkeep_year()) } \arguments{ \item{ref}{Desired Git reference, usually the name of a tag (\code{"v2"}) or branch (\code{"main"}). Other possibilities include a commit SHA (\code{"d1c516d"}) or \code{"HEAD"} (meaning "tip of remote's default branch"). If not specified, defaults to the latest published release of \code{r-lib/actions} (\url{https://github.com/r-lib/actions/releases}).} \item{path}{A path. If it exists, it is used. If it does not exist, it is created, provided that the parent path exists.} \item{copyright_holder}{Name of the copyright holder or holders. This defaults to \code{"{package name} authors"}; you should only change this if you use a CLA to assign copyright to a single entity.} \item{strict}{Boolean indicating whether or not a strict version of styling should be applied. See \code{\link[styler:tidyverse_style]{styler::tidyverse_style()}} for details.} \item{geometry}{a \link[magick:geometry]{magick::geometry} string specifying size. The default assumes that you have a hex logo using spec from \url{http://hexb.in/sticker.html}.} \item{retina}{\code{TRUE}, the default, scales the image on the README, assuming that geometry is double the desired size.} \item{last_upkeep}{Year of last upkeep. By default, the \code{Config/usethis/last-upkeep} field in \code{DESCRIPTION} is consulted for this, if it's defined. If there's no information on the last upkeep, the issue will contain the full checklist.} } \description{ These helpers follow tidyverse conventions which are generally a little stricter than the defaults, reflecting the need for greater rigor in commonly used packages. } \details{ \itemize{ \item \code{use_tidy_github_actions()}: Sets up the following workflows using \href{https://github.com/features/actions}{GitHub Actions}: \itemize{ \item Run \verb{R CMD check} on the current release, devel, and four previous versions of R. The build matrix also ensures \verb{R CMD check} is run at least once on each of the three major operating systems (Linux, macOS, and Windows). \item Report test coverage. \item Build and deploy a pkgdown site. \item Provide two commands to be used in pull requests: \verb{/document} to run \code{roxygen2::roxygenise()} and update the PR, and \verb{/style} to run \code{styler::style_pkg()} and update the PR. This is how the tidyverse team checks its packages, but it is overkill for less widely used packages. 
Consider using the more streamlined workflows set up by \code{\link[=use_github_actions]{use_github_actions()}} or \code{\link[=use_github_action_check_standard]{use_github_action_check_standard()}}. } } \itemize{ \item \code{create_tidy_package()}: creates a new package, immediately applies as many of the tidyverse conventions as possible, issues a few reminders, and activates the new package. \item \code{use_tidy_dependencies()}: sets up standard dependencies used by all tidyverse packages (except packages that are designed to be dependency free). \item \code{use_tidy_description()}: puts fields in standard order and alphabetises dependencies. \item \code{use_tidy_eval()}: imports a standard set of helpers to facilitate programming with the tidy eval toolkit. \item \code{use_tidy_style()}: styles source code according to the \href{https://style.tidyverse.org}{tidyverse style guide}. This function will overwrite files! See below for usage advice. \item \code{use_tidy_contributing()}: adds standard tidyverse contributing guidelines. \item \code{use_tidy_issue_template()}: adds a standard tidyverse issue template. \item \code{use_tidy_release_test_env()}: updates the test environment section in \code{cran-comments.md}. \item \code{use_tidy_support()}: adds a standard description of support resources for the tidyverse. \item \code{use_tidy_coc()}: equivalent to \code{use_code_of_conduct()}, but puts the document in a \verb{.github/} subdirectory. \item \code{use_tidy_github()}: convenience wrapper that calls \code{use_tidy_contributing()}, \code{use_tidy_issue_template()}, \code{use_tidy_support()}, \code{use_tidy_coc()}. \item \code{\link[=use_tidy_github_labels]{use_tidy_github_labels()}} calls \code{use_github_labels()} to implement tidyverse conventions around GitHub issue label names and colours. \item \code{use_tidy_upkeep_issue()} creates an issue containing a checklist of actions to bring your package up to current tidyverse standards. Also records the current date in the \code{Config/usethis/last-upkeep} field in \code{DESCRIPTION}. \item \code{use_tidy_logo()} calls \code{use_logo()} on the appropriate hex sticker PNG file at \url{https://github.com/rstudio/hex-stickers}. } } \section{\code{use_tidy_style()}}{ Uses the \href{https://styler.r-lib.org}{styler package} package to style all code in a package, project, or directory, according to the \href{https://style.tidyverse.org}{tidyverse style guide}. \strong{Warning:} This function will overwrite files! It is strongly suggested to only style files that are under version control or to first create a backup copy. Invisibly returns a data frame with one row per file, that indicates whether styling caused a change. 
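As a rough sketch of how that return value might be used (this assumes the package is under version control, and that the result has a logical \code{changed} column, as returned by \code{styler::style_pkg()}):

\preformatted{# Restyle the active package and capture the invisible result
styled <- use_tidy_style(strict = TRUE)

# Review which files were actually modified before committing
styled[styled$changed, , drop = FALSE]
}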
} usethis/man/use_github_actions.Rd0000644000176200001440000000306014717524721016667 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-deprecated.R \name{use_github_actions} \alias{use_github_actions} \alias{use_github_action_check_release} \alias{use_github_action_check_standard} \alias{use_github_action_pr_commands} \alias{use_github_action_check_full} \title{Deprecated GitHub Actions functions} \usage{ use_github_actions() use_github_action_check_release( save_as = "R-CMD-check.yaml", ref = NULL, ignore = TRUE, open = FALSE ) use_github_action_check_standard( save_as = "R-CMD-check.yaml", ref = NULL, ignore = TRUE, open = FALSE ) use_github_action_pr_commands( save_as = "pr-commands.yaml", ref = NULL, ignore = TRUE, open = FALSE ) use_github_action_check_full( save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE, repo_spec = NULL ) } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} \itemize{ \item \code{use_github_actions()} is deprecated because it was just an alias for \code{\link[=use_github_action_check_release]{use_github_action_check_release()}}. \item \code{use_github_action_check_full()} is overkill for most packages and is not recommended. \item \code{use_github_action_check_release()}, \code{use_github_action_check_standard()}, and \code{use_github_action_pr_commands()} are deprecated in favor of \code{\link[=use_github_action]{use_github_action()}}, which can now suggest specific workflows to use. } } \keyword{internal} usethis/man/create_package.Rd0000644000176200001440000000445214662214365015734 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/create.R \name{create_package} \alias{create_package} \alias{create_project} \title{Create a package or project} \usage{ create_package( path, fields = list(), rstudio = rstudioapi::isAvailable(), roxygen = TRUE, check_name = TRUE, open = rlang::is_interactive() ) create_project( path, rstudio = rstudioapi::isAvailable(), open = rlang::is_interactive() ) } \arguments{ \item{path}{A path. If it exists, it is used. If it does not exist, it is created, provided that the parent path exists.} \item{fields}{A named list of fields to add to \code{DESCRIPTION}, potentially overriding default values. See \code{\link[=use_description]{use_description()}} for how you can set personalized defaults using package options.} \item{rstudio}{If \code{TRUE}, calls \code{\link[=use_rstudio]{use_rstudio()}} to make the new package or project into an \href{https://r-pkgs.org/workflow101.html#sec-workflow101-rstudio-projects}{RStudio Project}. If \code{FALSE} and a non-package project, a sentinel \code{.here} file is placed so that the directory can be recognized as a project by the \href{https://here.r-lib.org}{here} or \href{https://rprojroot.r-lib.org}{rprojroot} packages.} \item{roxygen}{Do you plan to use roxygen2 to document your package?} \item{check_name}{Whether to check if the name is valid for CRAN and throw an error if not.} \item{open}{If \code{TRUE}, \link[=proj_activate]{activates} the new project: \itemize{ \item If using RStudio desktop, the package is opened in a new session. \item If on RStudio server, the current RStudio project is activated. \item Otherwise, the working directory and active project is changed. }} } \value{ Path to the newly created project or package, invisibly. 
} \description{ These functions create an R project: \itemize{ \item \code{create_package()} creates an R package \item \code{create_project()} creates a non-package project, i.e. a data analysis project } Both functions can be called on an existing project; you will be asked before any existing files are changed. } \seealso{ \code{\link[=create_tidy_package]{create_tidy_package()}} is a convenience function that extends \code{create_package()} by immediately applying as many of the tidyverse development conventions as possible. } usethis/man/use_revdep.Rd0000644000176200001440000000106414651000165015140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/revdep.R \name{use_revdep} \alias{use_revdep} \title{Reverse dependency checks} \usage{ use_revdep() } \description{ Performs set up for checking the reverse dependencies of an R package, as implemented by the revdepcheck package: \itemize{ \item Creates \verb{revdep/} directory and adds it to \code{.Rbuildignore} \item Populates \code{revdep/.gitignore} to prevent tracking of various revdep artefacts \item Prompts user to run the checks with \code{revdepcheck::revdep_check()} } } usethis/man/git-default-branch.Rd0000644000176200001440000001175414651000165016446 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git-default-branch.R \name{git-default-branch} \alias{git-default-branch} \alias{git_default_branch} \alias{git_default_branch_configure} \alias{git_default_branch_rediscover} \alias{git_default_branch_rename} \title{Get or set the default Git branch} \usage{ git_default_branch() git_default_branch_configure(name = "main") git_default_branch_rediscover(current_local_default = NULL) git_default_branch_rename(from = NULL, to = "main") } \arguments{ \item{name}{Default name for the initial branch in new Git repositories.} \item{current_local_default}{Name of the local branch that is currently functioning as the default branch. If unspecified, this can often be inferred.} \item{from}{Name of the branch that is currently functioning as the default branch.} \item{to}{New name for the default branch.} } \value{ Name of the default branch. } \description{ The \verb{git_default_branch*()} functions put some structure around the somewhat fuzzy (but definitely real) concept of the default branch. In particular, they support new conventions around the Git default branch name, globally or in a specific project / Git repository. } \section{Background on the default branch}{ Technically, Git has no official concept of the default branch. But in reality, almost all Git repos have an \emph{effective default branch}. If there's only one branch, this is it! It is the branch that most bug fixes and features get merged in to. It is the branch you see when you first visit a repo on a site such as GitHub. On a Git remote, it is the branch that \code{HEAD} points to. Historically, \code{master} has been the most common name for the default branch, but \code{main} is an increasingly popular choice. } \section{\code{git_default_branch_configure()}}{ This configures \code{init.defaultBranch} at the global (a.k.a user) level. This setting determines the name of the branch that gets created when you make the first commit in a new Git repo. \code{init.defaultBranch} only affects the local Git repos you create in the future. 
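For example, a minimal sketch of adopting \code{"main"} for future repos, plus an optional check of the resulting Git config (the gert call is only for inspection and assumes gert is installed):

\preformatted{# Record the preference in the global (user-level) Git config
git_default_branch_configure(name = "main")

# Optional: confirm that init.defaultBranch is now set
cfg <- gert::git_config_global()
cfg[grepl("defaultbranch", cfg$name, ignore.case = TRUE), ]
}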
} \section{\code{git_default_branch()}}{ This figures out the default branch of the current Git repo, integrating information from the local repo and, if applicable, the \code{upstream} or \code{origin} remote. If there is a local vs. remote mismatch, \code{git_default_branch()} throws an error with advice to call \code{git_default_branch_rediscover()} to repair the situation. For a remote repo, the default branch is the branch that \code{HEAD} points to. For the local repo, if there is only one branch, that must be the default! Otherwise we try to identify the relevant local branch by looking for specific branch names, in this order: \itemize{ \item whatever the default branch of \code{upstream} or \code{origin} is, if applicable \item \code{main} \item \code{master} \item the value of the Git option \code{init.defaultBranch}, with the usual deal where a local value, if present, takes precedence over a global (a.k.a. user-level) value } } \section{\code{git_default_branch_rediscover()}}{ This consults an external authority -- specifically, the remote \strong{source repo} on GitHub -- to learn the default branch of the current project / repo. If that doesn't match the apparent local default branch (for example, the project switched from \code{master} to \code{main}), we do the corresponding branch renaming in your local repo and, if relevant, in your fork. See \url{https://happygitwithr.com/common-remote-setups.html} for more about GitHub remote configurations and, e.g., what we mean by the source repo. This function works for the configurations \code{"ours"}, \code{"fork"}, and \code{"theirs"}. } \section{\code{git_default_branch_rename()}}{ Note: this only works for a repo that you effectively own. In terms of GitHub, you must own the \strong{source repo} personally or, if organization-owned, you must have \code{admin} permission on the \strong{source repo}. This renames the default branch in the \strong{source repo} on GitHub and then calls \code{git_default_branch_rediscover()}, to make any necessary changes in the local repo and, if relevant, in your personal fork. See \url{https://happygitwithr.com/common-remote-setups.html} for more about GitHub remote configurations and, e.g., what we mean by the source repo. This function works for the configurations \code{"ours"}, \code{"fork"}, and \code{"no_github"}. Regarding \code{"no_github"}: Of course, this function does what you expect for a local repo with no GitHub remotes, but that is not the primary use case. 
} \examples{ \dontrun{ git_default_branch() } \dontrun{ git_default_branch_configure() } \dontrun{ git_default_branch_rediscover() # you can always explicitly specify the local branch that's been playing the # role of the default git_default_branch_rediscover("unconventional_default_branch_name") } \dontrun{ git_default_branch_rename() # you can always explicitly specify one or both branch names git_default_branch_rename(from = "this", to = "that") } } usethis/man/use_rscloud_badge.Rd0000644000176200001440000000102514717524721016461 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-deprecated.R \name{use_rscloud_badge} \alias{use_rscloud_badge} \title{Deprecated badge function} \usage{ use_rscloud_badge(url) } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} \code{use_rscloud_badge()} has been replaced by \code{\link[=use_posit_cloud_badge]{use_posit_cloud_badge()}}. } \keyword{internal} usethis/man/proj_utils.Rd0000644000176200001440000001023414651000165015170 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/proj.R \name{proj_utils} \alias{proj_utils} \alias{proj_get} \alias{proj_set} \alias{proj_path} \alias{with_project} \alias{local_project} \title{Utility functions for the active project} \usage{ proj_get() proj_set(path = ".", force = FALSE) proj_path(..., ext = "") with_project( path = ".", code, force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE) ) local_project( path = ".", force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE), .local_envir = parent.frame() ) } \arguments{ \item{path}{Path to set. This \code{path} should exist or be \code{NULL}.} \item{force}{If \code{TRUE}, use this path without checking the usual criteria for a project. Use sparingly! The main application is to solve a temporary chicken-egg problem: you need to set the active project in order to add project-signalling infrastructure, such as initialising a Git repo or adding a \code{DESCRIPTION} file.} \item{...}{character vectors, if any values are NA, the result will also be NA. The paths follow the recycling rules used in the tibble package, namely that only length 1 arguments are recycled.} \item{ext}{An optional extension to append to the generated path.} \item{code}{Code to run with temporary active project} \item{setwd}{Whether to also temporarily set the working directory to the active project, if it is not \code{NULL}} \item{quiet}{Whether to suppress user-facing messages, while operating in the temporary active project} \item{.local_envir}{The environment to use for scoping. Defaults to current execution environment.} } \description{ Most \verb{use_*()} functions act on the \strong{active project}. If it is unset, usethis uses \href{https://rprojroot.r-lib.org}{rprojroot} to find the project root of the current working directory. It establishes the project root by looking for a \code{.here} file, an RStudio Project, a package \code{DESCRIPTION}, Git infrastructure, a \code{remake.yml} file, or a \code{.projectile} file. It then stores the active project for use for the remainder of the session. In general, end user scripts should not contain direct calls to \verb{usethis::proj_*()} utility functions. They are internal functions that are exported for occasional interactive use or use in packages that extend usethis. 
End user code should call functions in \href{https://rprojroot.r-lib.org}{rprojroot} or its simpler companion, \href{https://here.r-lib.org}{here}, to programmatically detect a project and build paths within it. If you are puzzled why a path (usually the current working directory) does \emph{not} appear to be inside project, it can be helpful to call \code{here::dr_here()} to get much more verbose feedback. } \section{Functions}{ \itemize{ \item \code{proj_get()}: Retrieves the active project and, if necessary, attempts to set it in the first place. \item \code{proj_set()}: Sets the active project. \item \code{proj_path()}: Builds paths within the active project returned by \code{proj_get()}. Thin wrapper around \code{\link[fs:path]{fs::path()}}. \item \code{with_project()}: Runs code with a temporary active project and, optionally, working directory. It is an example of the \verb{with_*()} functions in \href{https://withr.r-lib.org}{withr}. \item \code{local_project()}: Sets an active project and, optionally, working directory until the current execution environment goes out of scope, e.g. the end of the current function or test. It is an example of the \verb{local_*()} functions in \href{https://withr.r-lib.org}{withr}. }} \examples{ \dontrun{ ## see the active project proj_get() ## manually set the active project proj_set("path/to/target/project") ## build a path within the active project (both produce same result) proj_path("R/foo.R") proj_path("R", "foo", ext = "R") ## build a path within SOME OTHER project with_project("path/to/some/other/project", proj_path("blah.R")) ## convince yourself that with_project() temporarily changes the project with_project("path/to/some/other/project", print(proj_sitrep())) } } \seealso{ Other project functions: \code{\link{proj_sitrep}()} } \concept{project functions} usethis/man/ui_silence.Rd0000644000176200001440000000100014651000165015104 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utils-ui.R \name{ui_silence} \alias{ui_silence} \title{Suppress usethis's messaging} \usage{ ui_silence(code) } \arguments{ \item{code}{Code to execute with usual UI output silenced.} } \value{ Whatever \code{code} returns. } \description{ Execute a bit of code without usethis's normal messaging. } \examples{ # compare the messaging you see from this: browse_github("usethis") # vs. this: ui_silence( browse_github("usethis") ) } usethis/man/use_cpp11.Rd0000644000176200001440000000101214651000165014570 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/cpp11.R \name{use_cpp11} \alias{use_cpp11} \title{Use C++ via the cpp11 package} \usage{ use_cpp11() } \description{ Adds infrastructure needed to use the \href{https://cpp11.r-lib.org}{cpp11} package, a header-only R package that helps R package developers handle R objects with C++ code: \itemize{ \item Creates \verb{src/} \item Adds cpp11 to \code{DESCRIPTION} \item Creates \code{src/code.cpp}, an initial placeholder \code{.cpp} file } } usethis/man/use_git_config.Rd0000644000176200001440000000237014651000165015764 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_config} \alias{use_git_config} \title{Configure Git} \usage{ use_git_config(scope = c("user", "project"), ...) 
} \arguments{ \item{scope}{Edit globally for the current \strong{user}, or locally for the current \strong{project}} \item{...}{Name-value pairs, processed as <\code{\link[rlang:dyn-dots]{dynamic-dots}}>.} } \value{ Invisibly, the previous values of the modified components, as a named list. } \description{ Sets Git options, for either the user or the project ("global" or "local", in Git terminology). Wraps \code{\link[gert:git_config]{gert::git_config_set()}} and \code{\link[gert:git_config]{gert::git_config_global_set()}}. To inspect Git config, see \code{\link[gert:git_config]{gert::git_config()}}. } \examples{ \dontrun{ # set the user's global user.name and user.email use_git_config(user.name = "Jane", user.email = "jane@example.org") # set the user.name and user.email locally, i.e. for current repo/project use_git_config( scope = "project", user.name = "Jane", user.email = "jane@example.org" ) } } \seealso{ Other git helpers: \code{\link{use_git}()}, \code{\link{use_git_hook}()}, \code{\link{use_git_ignore}()} } \concept{git helpers} usethis/man/use_lifecycle.Rd0000644000176200001440000000133714651000165015615 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lifecycle.R \name{use_lifecycle} \alias{use_lifecycle} \title{Use lifecycle badges} \usage{ use_lifecycle() } \description{ This helper: \itemize{ \item Adds lifecycle as a dependency. \item Imports \code{\link[lifecycle:deprecated]{lifecycle::deprecated()}} for use in function arguments. \item Copies the lifecycle badges into \code{man/figures}. \item Reminds you how to use the badge syntax. } Learn more at \url{https://lifecycle.r-lib.org/articles/communicate.html} } \seealso{ \code{\link[=use_lifecycle_badge]{use_lifecycle_badge()}} to signal the \href{https://lifecycle.r-lib.org/articles/stages.html}{lifecycle stage} of your package as whole } usethis/man/use_spell_check.Rd0000644000176200001440000000175714651000165016140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spelling.R \name{use_spell_check} \alias{use_spell_check} \title{Use spell check} \usage{ use_spell_check(vignettes = TRUE, lang = "en-US", error = FALSE) } \arguments{ \item{vignettes}{Logical, \code{TRUE} to spell check all \code{rmd} and \code{rnw} files in the \verb{vignettes/} folder.} \item{lang}{Preferred spelling language. Usually either \code{"en-US"} or \code{"en-GB"}.} \item{error}{Logical, indicating whether the unit test should fail if spelling errors are found. Defaults to \code{FALSE}, which does not error, but prints potential spelling errors} } \description{ Adds a unit test to automatically run a spell check on documentation and, optionally, vignettes during \verb{R CMD check}, using the \link[spelling:spell_check_package]{spelling} package. Also adds a \code{WORDLIST} file to the package, which is a dictionary of whitelisted words. See \link[spelling:wordlist]{spelling::wordlist} for details. 
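As an illustrative sketch (the British English language code is only an example), the one-time setup and an on-demand check might look like:

\preformatted{# One-time setup, run from the root of the package
use_spell_check(vignettes = TRUE, lang = "en-GB", error = FALSE)

# Re-run the same check interactively at any time
spelling::spell_check_package()
}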
} usethis/man/use_author.Rd0000644000176200001440000000537214717524762015204 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/author.R \name{use_author} \alias{use_author} \title{Add an author to the \code{Authors@R} field in DESCRIPTION} \usage{ use_author(given = NULL, family = NULL, ..., role = "ctb") } \arguments{ \item{given}{a character vector with the \emph{given} names, or a list thereof.} \item{family}{a character string with the \emph{family} name, or a list thereof.} \item{...}{ Arguments passed on to \code{\link[utils:person]{utils::person}} \describe{ \item{\code{middle}}{a character string with the collapsed middle name(s). Deprecated, see \bold{Details}.} \item{\code{email}}{a character string (or vector) giving an e-mail address (each), or a list thereof.} \item{\code{comment}}{a character string (or vector) providing comments, or a list thereof.} \item{\code{first}}{a character string giving the first name. Deprecated, see \bold{Details}.} \item{\code{last}}{a character string giving the last name. Deprecated, see \bold{Details}.} }} \item{role}{a character vector specifying the role(s) of the person (see \bold{Details}), or a list thereof.} } \description{ \code{use_author()} adds a person to the \code{Authors@R} field of the DESCRIPTION file, creating that field if necessary. It will not modify, e.g., the role(s) or email of an existing author (judged using their "Given Family" name). For that we recommend editing DESCRIPTION directly. Or, for programmatic use, consider calling the more specialized functions available in the \pkg{desc} package directly. \code{use_author()} also surfaces two other situations you might want to address: \itemize{ \item Explicit use of the fields \code{Author} or \code{Maintainer}. We recommend switching to the more modern \code{Authors@R} field instead, because it offers richer metadata for various downstream uses. (Note that \code{Authors@R} is \emph{eventually} processed to create \code{Author} and \code{Maintainer} fields, but only when the \code{tar.gz} is built from package source.) \item Presence of the fake author placed by \code{\link[=create_package]{create_package()}} and \code{\link[=use_description]{use_description()}}. This happens when \pkg{usethis} has to create a DESCRIPTION file and the user hasn't given any author information via the \code{fields} argument or the global option \code{"usethis.description"}. The placeholder looks something like \verb{First Last [aut, cre]} and \code{use_author()} offers to remove it in interactive sessions. } } \examples{ \dontrun{ use_author( given = "Lucy", family = "van Pelt", role = c("aut", "cre"), email = "lucy@example.com", comment = c(ORCID = "LUCY-ORCID-ID") ) use_author("Charlie", "Brown") } } usethis/man/use_upkeep_issue.Rd0000644000176200001440000000173714717524762016404 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/upkeep.R \name{use_upkeep_issue} \alias{use_upkeep_issue} \title{Create an upkeep checklist in a GitHub issue} \usage{ use_upkeep_issue(year = NULL) } \arguments{ \item{year}{Year you are performing the upkeep, used in the issue title. Defaults to current year} } \description{ This opens an issue in your package repository with a checklist of tasks for regular maintenance of your package. This is a fairly opinionated list of tasks but we believe taking care of them will generally make your package better, easier to maintain, and more enjoyable for your users. 
Some of the tasks are meant to be performed only once (and once completed shouldn't show up in subsequent lists), and some should be reviewed periodically. The tidyverse team uses a similar function \code{\link[=use_tidy_upkeep_issue]{use_tidy_upkeep_issue()}} for our annual package Spring Cleaning. } \examples{ \dontrun{ use_upkeep_issue() } } usethis/man/use_r.Rd0000644000176200001440000000511214717524721014126 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/r.R \name{use_r} \alias{use_r} \alias{use_test} \title{Create or edit R or test files} \usage{ use_r(name = NULL, open = rlang::is_interactive()) use_test(name = NULL, open = rlang::is_interactive()) } \arguments{ \item{name}{Either a string giving a file name (without directory) or \code{NULL} to take the name from the currently open file in RStudio.} \item{open}{Whether to open the file for interactive editing.} } \description{ This pair of functions makes it easy to create paired R and test files, using the convention that the tests for \code{R/foofy.R} should live in \code{tests/testthat/test-foofy.R}. You can use them to create new files from scratch by supplying \code{name}, or if you use RStudio, you can call to create (or navigate to) the companion file based on the currently open file. This also works when a test snapshot file is active, i.e. if you're looking at \verb{tests/testthat/_snaps/foofy.md}, \code{use_r()} or \code{use_test()} take you to \code{R/foofy.R} or \code{tests/testthat/test-foofy.R}, respectively. } \section{Renaming files in an existing package}{ Here are some tips on aligning file names across \verb{R/} and \verb{tests/testthat/} in an existing package that did not necessarily follow this convention before. This script generates a data frame of \verb{R/} and test files that can help you identify missed opportunities for pairing: \if{html}{\out{
}}\preformatted{library(fs) library(tidyverse) bind_rows( tibble( type = "R", path = dir_ls("R/", regexp = "\\\\.[Rr]$"), name = as.character(path_ext_remove(path_file(path))), ), tibble( type = "test", path = dir_ls("tests/testthat/", regexp = "/test[^/]+\\\\.[Rr]$"), name = as.character(path_ext_remove(str_remove(path_file(path), "^test[-_]"))), ) ) \%>\% pivot_wider(names_from = type, values_from = path) \%>\% print(n = Inf) }\if{html}{\out{
}} The \code{\link[=rename_files]{rename_files()}} function can also be helpful. } \examples{ \dontrun{ # create a new .R file below R/ use_r("coolstuff") # if `R/coolstuff.R` is active in a supported IDE, you can now do: use_test() # if `tests/testthat/test-coolstuff.R` is active in a supported IDE, you can # return to `R/coolstuff.R` with: use_r() } } \seealso{ \itemize{ \item The \href{https://r-pkgs.org/testing-basics.html}{testing} and \href{https://r-pkgs.org/code.html}{R code} chapters of \href{https://r-pkgs.org}{R Packages}. \item \code{\link[=use_test_helper]{use_test_helper()}} to create a testthat helper file. } } usethis/man/licenses.Rd0000644000176200001440000000576414651000165014617 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/license.R \name{licenses} \alias{use_mit_license} \alias{use_gpl_license} \alias{use_agpl_license} \alias{use_lgpl_license} \alias{use_apache_license} \alias{use_cc0_license} \alias{use_ccby_license} \alias{use_proprietary_license} \alias{use_gpl3_license} \alias{use_agpl3_license} \alias{use_apl2_license} \title{License a package} \usage{ use_mit_license(copyright_holder = NULL) use_gpl_license(version = 3, include_future = TRUE) use_agpl_license(version = 3, include_future = TRUE) use_lgpl_license(version = 3, include_future = TRUE) use_apache_license(version = 2, include_future = TRUE) use_cc0_license() use_ccby_license() use_proprietary_license(copyright_holder) } \arguments{ \item{copyright_holder}{Name of the copyright holder or holders. This defaults to \code{"{package name} authors"}; you should only change this if you use a CLA to assign copyright to a single entity.} \item{version}{License version. This defaults to latest version all licenses.} \item{include_future}{If \code{TRUE}, will license your package under the current and any potential future versions of the license. This is generally considered to be good practice because it means your package will automatically include "bug" fixes in licenses.} } \description{ Adds the necessary infrastructure to declare your package as licensed with one of these popular open source licenses: Permissive: \itemize{ \item \href{https://choosealicense.com/licenses/mit/}{MIT}: simple and permissive. \item \href{https://choosealicense.com/licenses/apache-2.0/}{Apache 2.0}: MIT + provides patent protection. } Copyleft: \itemize{ \item \href{https://choosealicense.com/licenses/gpl-2.0/}{GPL v2}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/gpl-3.0/}{GPL v3}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/agpl-3.0/}{AGPL v3}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/lgpl-2.1/}{LGPL v2.1}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/lgpl-3.0/}{LGPL v3}: requires sharing of improvements. } Creative commons licenses appropriate for data packages: \itemize{ \item \href{https://creativecommons.org/publicdomain/zero/1.0/}{CC0}: dedicated to public domain. \item \href{https://creativecommons.org/licenses/by/4.0/}{CC-BY}: Free to share and adapt, must give appropriate credit. } See \url{https://choosealicense.com} for more details and other options. Alternatively, for code that you don't want to share with others, \code{use_proprietary_license()} makes it clear that all rights are reserved, and the code is not open source. 
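If you are unsure where to start, a minimal sketch for a typical open source package (assuming an active package project) is:
\preformatted{## permissive MIT license; copyright defaults to the package authors
use_mit_license()

## or a copyleft license, pinned to GPL v3 plus any future versions
use_gpl_license(version = 3, include_future = TRUE)
}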
} \details{ CRAN does not permit you to include copies of standard licenses in your package, so these functions save the license as \code{LICENSE.md} and add it to \code{.Rbuildignore}. } \seealso{ For more details, refer to the \href{https://r-pkgs.org/license.html}{license chapter} in \emph{R Packages}. } usethis/man/use_version.Rd0000644000176200001440000000342514651000165015343 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/version.R \name{use_version} \alias{use_version} \alias{use_dev_version} \title{Increment package version} \usage{ use_version(which = NULL, push = FALSE) use_dev_version(push = FALSE) } \arguments{ \item{which}{A string specifying which level to increment, one of: "major", "minor", "patch", "dev". If \code{NULL}, user can choose interactively.} \item{push}{If \code{TRUE}, also attempts to push the commits to the remote branch.} } \description{ usethis supports semantic versioning, which is described in more detail in the \href{https://r-pkgs.org/lifecycle.html#sec-lifecycle-version-number}{version section} of \href{https://r-pkgs.org}{R Packages}. A version number breaks down like so: \if{html}{\out{
}}\preformatted{<major>.<minor>.<patch>       (released version)
<major>.<minor>.<patch>.<dev> (dev version)
}\if{html}{\out{
}} \code{use_version()} increments the "Version" field in \code{DESCRIPTION}, adds a new heading to \code{NEWS.md} (if it exists), commits those changes (if package uses Git), and optionally pushes (if safe to do so). It makes the same update to a line like \code{PKG_version = "x.y.z";} in \code{src/version.c} (if it exists). \code{use_dev_version()} increments to a development version, e.g. from 1.0.0 to 1.0.0.9000. If the existing version is already a development version with four components, it does nothing. Thin wrapper around \code{use_version()}. } \examples{ \dontrun{ ## for interactive selection, do this: use_version() ## request a specific type of increment use_version("minor") use_dev_version() } } \seealso{ The \href{https://r-pkgs.org/lifecycle.html#sec-lifecycle-version-number}{version section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_citation.Rd0000644000176200001440000000044214651000165015464 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/citation.R \name{use_citation} \alias{use_citation} \title{Create a CITATION template} \usage{ use_citation() } \description{ Use this if you want to encourage users of your package to cite an article or book. } usethis/man/ui-questions.Rd0000644000176200001440000000475514651000165015456 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ui-legacy.R \name{ui-questions} \alias{ui-questions} \alias{ui_yeah} \alias{ui_nope} \title{User interface - Questions} \usage{ ui_yeah( x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame() ) ui_nope( x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame() ) } \arguments{ \item{x}{A character vector. For block styles, conditions, and questions, each element of the vector becomes a line, and the result is processed by \code{\link[glue:glue]{glue::glue()}}. For inline styles, each element of the vector becomes an entry in a comma separated list.} \item{yes}{A character vector of "yes" strings, which are randomly sampled to populate the menu.} \item{no}{A character vector of "no" strings, which are randomly sampled to populate the menu.} \item{n_yes}{An integer. The number of "yes" strings to include.} \item{n_no}{An integer. The number of "no" strings to include.} \item{shuffle}{A logical. Should the order of the menu options be randomly shuffled?} \item{.envir}{Used to ensure that \code{\link[glue:glue]{glue::glue()}} gets the correct environment. For expert use only.} } \value{ A logical. \code{ui_yeah()} returns \code{TRUE} when the user selects a "yes" option and \code{FALSE} otherwise, i.e. when user selects a "no" option or refuses to make a selection (cancels). \code{ui_nope()} is the logical opposite of \code{ui_yeah()}. } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#superseded}{\figure{lifecycle-superseded.svg}{options: alt='[Superseded]'}}}{\strong{[Superseded]}} \code{ui_yeah()} and \code{ui_nope()} are technically superseded, but, unlike the rest of the legacy \code{\link[=ui-legacy-functions]{ui_*()}} functions, there's not yet a drop-in replacement available in the \href{https://cli.r-lib.org/}{cli package}. 
\code{ui_yeah()} and \code{ui_nope()} are no longer used internally in usethis. } \examples{ \dontrun{ ui_yeah("Do you like R?") ui_nope("Have you tried turning it off and on again?", n_yes = 1, n_no = 1) ui_yeah("Are you sure its plugged in?", yes = "Yes", no = "No", shuffle = FALSE) } } \keyword{internal} usethis/man/use_jenkins.Rd0000644000176200001440000000120414651000165015310 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/jenkins.R \name{use_jenkins} \alias{use_jenkins} \title{Create Jenkinsfile for Jenkins CI Pipelines} \usage{ use_jenkins() } \description{ \code{use_jenkins()} adds a basic Jenkinsfile for R packages to the project root directory. The Jenkinsfile stages take advantage of calls to \code{make}, and so calling this function will also run \code{use_make()} if a Makefile does not already exist at the project root. } \seealso{ The \href{https://www.jenkins.io/doc/book/pipeline/jenkinsfile/}{documentation on Jenkins Pipelines}. \code{\link[=use_make]{use_make()}} } usethis/man/zip-utils.Rd0000644000176200001440000000624514717524721014761 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/course.R \name{zip-utils} \alias{zip-utils} \alias{use_course} \alias{use_zip} \title{Download and unpack a ZIP file} \usage{ use_course(url, destdir = getOption("usethis.destdir")) use_zip( url, destdir = getwd(), cleanup = if (rlang::is_interactive()) NA else FALSE ) } \arguments{ \item{url}{Link to a ZIP file containing the materials. To reduce the chance of typos in live settings, these shorter forms are accepted: \itemize{ \item GitHub repo spec: "OWNER/REPO". Equivalent to \verb{https://github.com/OWNER/REPO/DEFAULT_BRANCH.zip}. \item bit.ly, pos.it, or rstd.io shortlinks: "bit.ly/xxx-yyy-zzz", "pos.it/foofy" or "rstd.io/foofy". The instructor must then arrange for the shortlink to point to a valid download URL for the target ZIP file. The helper \code{\link[=create_download_url]{create_download_url()}} helps to create such URLs for GitHub, DropBox, and Google Drive. }} \item{destdir}{Destination for the new folder. Defaults to the location stored in the global option \code{usethis.destdir}, if defined, or to the user's Desktop or similarly conspicuous place otherwise.} \item{cleanup}{Whether to delete the original ZIP file after unpacking its contents. In an interactive setting, \code{NA} leads to a menu where user can approve the deletion (or decline).} } \value{ Path to the new directory holding the unpacked ZIP file, invisibly. } \description{ Functions to download and unpack a ZIP file into a local folder of files, with very intentional default behaviour. Useful in pedagogical settings or anytime you need a large audience to download a set of files quickly and actually be able to find them. The underlying helpers are documented in \link{use_course_details}. } \section{Functions}{ \itemize{ \item \code{use_course()}: Designed with live workshops in mind. Includes intentional friction to highlight the download destination. Workflow: \itemize{ \item User executes, e.g., \code{use_course("bit.ly/xxx-yyy-zzz")}. \item User is asked to notice and confirm the location of the new folder. Specify \code{destdir} or configure the \code{"usethis.destdir"} option to prevent this. \item User is asked if they'd like to delete the ZIP file. \item If new folder contains an \code{.Rproj} file, a new instance of RStudio is launched. Otherwise, the folder is opened in the file manager, e.g. 
Finder or File Explorer. } \item \code{use_zip()}: More useful in day-to-day work. Downloads in current working directory, by default, and allows \code{cleanup} behaviour to be specified. }} \examples{ \dontrun{ # download the source of usethis from GitHub, behind a bit.ly shortlink use_course("bit.ly/usethis-shortlink-example") use_course("http://bit.ly/usethis-shortlink-example") # download the source of rematch2 package from CRAN use_course("https://cran.r-project.org/bin/windows/contrib/3.4/rematch2_2.0.1.zip") # download the source of rematch2 package from GitHub, 4 ways use_course("r-lib/rematch2") use_course("https://api.github.com/repos/r-lib/rematch2/zipball/HEAD") use_course("https://api.github.com/repos/r-lib/rematch2/zipball/main") use_course("https://github.com/r-lib/rematch2/archive/main.zip") } } usethis/man/ui-legacy-functions.Rd0000644000176200001440000000703714717524762016713 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ui-legacy.R \name{ui-legacy-functions} \alias{ui-legacy-functions} \alias{ui_line} \alias{ui_todo} \alias{ui_done} \alias{ui_oops} \alias{ui_info} \alias{ui_code_block} \alias{ui_stop} \alias{ui_warn} \alias{ui_field} \alias{ui_value} \alias{ui_path} \alias{ui_code} \alias{ui_unset} \title{Legacy functions related to user interface} \usage{ ui_line(x = character(), .envir = parent.frame()) ui_todo(x, .envir = parent.frame()) ui_done(x, .envir = parent.frame()) ui_oops(x, .envir = parent.frame()) ui_info(x, .envir = parent.frame()) ui_code_block(x, copy = rlang::is_interactive(), .envir = parent.frame()) ui_stop(x, .envir = parent.frame()) ui_warn(x, .envir = parent.frame()) ui_field(x) ui_value(x) ui_path(x, base = NULL) ui_code(x) ui_unset(x = "unset") } \arguments{ \item{x}{A character vector. For block styles, conditions, and questions, each element of the vector becomes a line, and the result is processed by \code{\link[glue:glue]{glue::glue()}}. For inline styles, each element of the vector becomes an entry in a comma separated list.} \item{.envir}{Used to ensure that \code{\link[glue:glue]{glue::glue()}} gets the correct environment. For expert use only.} \item{copy}{If \code{TRUE}, the session is interactive, and the clipr package is installed, will copy the code block to the clipboard.} \item{base}{If specified, paths will be displayed relative to this path.} } \value{ The block styles, conditions, and questions are called for their side-effect. The inline styles return a string. } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#superseded}{\figure{lifecycle-superseded.svg}{options: alt='[Superseded]'}}}{\strong{[Superseded]}} These functions are now superseded. External users of the \verb{usethis::ui_*()} functions are encouraged to use the \href{https://cli.r-lib.org/}{cli package} instead. The cli package did not have the required functionality when the \verb{usethis::ui_*()} functions were created, but it has had that for a while now and it's the superior option. There is even a cli vignette about how to make this transition: \code{vignette("usethis-ui", package = "cli")}. usethis itself now uses cli internally for its UI, but these new functions are not exported and presumably never will be. There is a developer-focused article on the process of transitioning usethis's own UI to use cli: \href{https://usethis.r-lib.org/articles/ui-cli-conversion.html}{Converting usethis's UI to use cli}. 
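As a rough illustration of that transition, here is a legacy call next to one possible cli equivalent (a sketch, not an exhaustive or official mapping; see the cli vignette above for details):
\preformatted{## legacy usethis UI
ui_done("Setting {ui_field('LazyData')} to {ui_value('true')} in DESCRIPTION")

## one possible cli equivalent
cli::cli_alert_success("Setting {.field LazyData} to {.val true} in DESCRIPTION")
}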
} \details{ The \code{ui_} functions can be broken down into four main categories: \itemize{ \item block styles: \code{ui_line()}, \code{ui_done()}, \code{ui_todo()}, \code{ui_oops()}, \code{ui_info()}. \item conditions: \code{ui_stop()}, \code{ui_warn()}. \item questions: \code{\link[=ui_yeah]{ui_yeah()}}, \code{\link[=ui_nope]{ui_nope()}}. \item inline styles: \code{ui_field()}, \code{ui_value()}, \code{ui_path()}, \code{ui_code()}, \code{ui_unset()}. } The question functions \code{\link[=ui_yeah]{ui_yeah()}} and \code{\link[=ui_nope]{ui_nope()}} have their own \link[=ui-questions]{help page}. All UI output (apart from \code{ui_yeah()}/\code{ui_nope()} prompts) can be silenced by setting \code{options(usethis.quiet = TRUE)}. Use \code{\link[=ui_silence]{ui_silence()}} to silence selected actions. } \examples{ new_val <- "oxnard" ui_done("{ui_field('name')} set to {ui_value(new_val)}") ui_todo("Redocument with {ui_code('devtools::document()')}") ui_code_block(c( "Line 1", "Line 2", "Line 3" )) } \keyword{internal} usethis/man/use_git_remote.Rd0000644000176200001440000000410414651000165016007 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_remote} \alias{use_git_remote} \alias{git_remotes} \title{Configure and report Git remotes} \usage{ use_git_remote(name = "origin", url, overwrite = FALSE) git_remotes() } \arguments{ \item{name}{A string giving the short name of a remote.} \item{url}{A string giving the url of a remote.} \item{overwrite}{Logical. Controls whether an existing remote can be modified.} } \value{ Named list of Git remotes. } \description{ Two helpers are available: \itemize{ \item \code{use_git_remote()} sets the remote associated with \code{name} to \code{url}. \item \code{git_remotes()} reports the configured remotes, similar to \verb{git remote -v}. } } \examples{ \dontrun{ # see current remotes git_remotes() # add new remote named 'foo', a la `git remote add ` use_git_remote(name = "foo", url = "https://github.com//.git") # remove existing 'foo' remote, a la `git remote remove ` use_git_remote(name = "foo", url = NULL, overwrite = TRUE) # change URL of remote 'foo', a la `git remote set-url ` use_git_remote( name = "foo", url = "https://github.com//.git", overwrite = TRUE ) # Scenario: Fix remotes when you cloned someone's repo, but you should # have fork-and-cloned (in order to make a pull request). # Store origin = main repo's URL, e.g., "git@github.com:/.git" upstream_url <- git_remotes()[["origin"]] # IN THE BROWSER: fork the main GitHub repo and get your fork's remote URL my_url <- "git@github.com:/.git" # Rotate the remotes use_git_remote(name = "origin", url = my_url) use_git_remote(name = "upstream", url = upstream_url) git_remotes() # Scenario: Add upstream remote to a repo that you fork-and-cloned, so you # can pull upstream changes. # Note: If you fork-and-clone via `usethis::create_from_github()`, this is # done automatically! 
# Get URL of main GitHub repo, probably in the browser upstream_url <- "git@github.com:/.git" use_git_remote(name = "upstream", url = upstream_url) } } usethis/man/roxygen/0000755000176200001440000000000014651000165014202 5ustar liggesusersusethis/man/roxygen/templates/0000755000176200001440000000000014651000165016200 5ustar liggesusersusethis/man/roxygen/templates/double-auth.R0000644000176200001440000000257214651000165020542 0ustar liggesusers#' @section Git/GitHub Authentication: #' Many usethis functions, including those documented here, potentially interact #' with GitHub in two different ways: #' * Via the GitHub REST API. Examples: create a repo, a fork, or a pull #' request. #' * As a conventional Git remote. Examples: clone, fetch, or push. #' #' Therefore two types of auth can happen and your credentials must be #' discoverable. Which credentials do we mean? #' #' * A GitHub personal access token (PAT) must be discoverable by the gh #' package, which is used for GitHub operations via the REST API. See #' [gh_token_help()] for more about getting and configuring a PAT. #' * If you use the HTTPS protocol for Git remotes, your PAT is also used for #' Git operations, such as `git push`. Usethis uses the gert package for this, #' so the PAT must be discoverable by gert. Generally gert and gh will #' discover and use the same PAT. This ability to "kill two birds with one #' stone" is why HTTPS + PAT is our recommended auth strategy for those new #' to Git and GitHub and PRs. #' * If you use SSH remotes, your SSH keys must also be discoverable, in #' addition to your PAT. The public key must be added to your GitHub account. #' #' Git/GitHub credential management is covered in a dedicated article: #' [Managing Git(Hub) Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html) usethis/man/git_vaccinate.Rd0000644000176200001440000000117714651000165015604 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{git_vaccinate} \alias{git_vaccinate} \title{Vaccinate your global gitignore file} \usage{ git_vaccinate() } \description{ Adds \code{.Rproj.user}, \code{.Rhistory}, \code{.Rdata}, \code{.httr-oauth}, \code{.DS_Store}, and \code{.quarto} to your global (a.k.a. user-level) \code{.gitignore}. This is good practice as it decreases the chance that you will accidentally leak credentials to GitHub. \code{git_vaccinate()} also tries to detect and fix the situation where you have a global gitignore file, but it's missing from your global Git config. } usethis/man/use_gitlab_ci.Rd0000644000176200001440000000441114651000165015567 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ci.R \name{use_gitlab_ci} \alias{use_gitlab_ci} \alias{use_circleci} \alias{use_circleci_badge} \title{Continuous integration setup and badges} \usage{ use_gitlab_ci() use_circleci(browse = rlang::is_interactive(), image = "rocker/verse:latest") use_circleci_badge(repo_spec = NULL) } \arguments{ \item{browse}{Open a browser window to enable automatic builds for the package.} \item{image}{The Docker image to use for build. Must be available on \href{https://hub.docker.com}{DockerHub}. The \href{https://hub.docker.com/r/rocker/verse}{rocker/verse} image includes TeXLive, pandoc, and the tidyverse packages. For a minimal image, try \href{https://hub.docker.com/r/rocker/r-ver}{rocker/r-ver}. To specify a version of R, change the tag from \code{latest} to the version you want, e.g. 
\verb{rocker/r-ver:3.5.3}.} \item{repo_spec}{Optional GitHub repo specification in this form: \code{owner/repo}. This can usually be inferred from the GitHub remotes of active project.} } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#questioning}{\figure{lifecycle-questioning.svg}{options: alt='[Questioning]'}}}{\strong{[Questioning]}} These functions are not actively used by the tidyverse team, and may not currently work. Use at your own risk. Sets up third-party continuous integration (CI) services for an R package on GitLab or CircleCI. These functions: \itemize{ \item Add service-specific configuration files and add them to \code{.Rbuildignore}. \item Activate a service or give the user a detailed prompt. \item Provide the markdown to insert a badge into README. } } \section{\code{use_gitlab_ci()}}{ Adds a basic \code{.gitlab-ci.yml} to the top-level directory of a package. This is a configuration file for the \href{https://docs.gitlab.com/ee/ci/}{GitLab CI/CD} continuous integration service. } \section{\code{use_circleci()}}{ Adds a basic \code{.circleci/config.yml} to the top-level directory of a package. This is a configuration file for the \href{https://circleci.com/}{CircleCI} continuous integration service. } \section{\code{use_circleci_badge()}}{ Only adds the \href{https://circleci.com/}{Circle CI} badge. Use for a project where Circle CI is already configured. } usethis/man/use_pipe.Rd0000644000176200001440000000165414717524721014631 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pipe.R \name{use_pipe} \alias{use_pipe} \title{Use magrittr's pipe in your package} \usage{ use_pipe(export = TRUE) } \arguments{ \item{export}{If \code{TRUE}, the file \code{R/utils-pipe.R} is added, which provides the roxygen template to import and re-export \verb{\%>\%}. If \code{FALSE}, the necessary roxygen directive is added, if possible, or otherwise instructions are given.} } \description{ Does setup necessary to use magrittr's pipe operator, \verb{\%>\%} in your package. This function requires the use of \pkg{roxygen2}. \itemize{ \item Adds magrittr to "Imports" in \code{DESCRIPTION}. \item Imports the pipe operator specifically, which is necessary for internal use. \item Exports the pipe operator, if \code{export = TRUE}, which is necessary to make \verb{\%>\%} available to the users of your package. } } \examples{ \dontrun{ use_pipe() } } usethis/man/use_rstudio_preferences.Rd0000644000176200001440000000124714651000165017730 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rstudio.R \name{use_rstudio_preferences} \alias{use_rstudio_preferences} \title{Set global RStudio preferences} \usage{ use_rstudio_preferences(...) } \arguments{ \item{...}{<\code{\link[rlang:dyn-dots]{dynamic-dots}}> Property-value pairs.} } \value{ A named list of the previous values, invisibly. } \description{ This function allows you to set global RStudio preferences, achieving the same effect programmatically as clicking buttons in RStudio's Global Options. You can find a list of configurable properties at \url{https://docs.posit.co/ide/server-pro/reference/session_user_settings.html}. 
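For example, a call might look something like this (a sketch; the property names below are illustrative assumptions, so check them against the list linked above):
\preformatted{use_rstudio_preferences(
  always_save_history = FALSE,
  rainbow_parentheses = TRUE
)
}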
} usethis/man/use_tibble.Rd0000644000176200001440000000271314651000165015116 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tibble.R \name{use_tibble} \alias{use_tibble} \title{Prepare to return a tibble} \usage{ use_tibble() } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#questioning}{\figure{lifecycle-questioning.svg}{options: alt='[Questioning]'}}}{\strong{[Questioning]}} Does minimum setup such that a tibble returned by your package is handled using the tibble method for generics like \code{print()} or \code{[}. Presumably you care about this if you've chosen to store and expose an object with class \code{tbl_df}. Specifically: \itemize{ \item Check that the active package uses roxygen2 \item Add the tibble package to "Imports" in \code{DESCRIPTION} \item Prepare the roxygen directive necessary to import at least one function from tibble: \itemize{ \item If possible, the directive is inserted into existing package-level documentation, i.e. the roxygen snippet created by \code{\link[=use_package_doc]{use_package_doc()}} \item Otherwise, we issue advice on where the user should add the directive } } This is necessary when your package returns a stored data object that has class \code{tbl_df}, but the package code does not make direct use of functions from the tibble package. If you do nothing, the tibble namespace is not necessarily loaded and your tibble may therefore be printed and subsetted like a base \code{data.frame}. } \examples{ \dontrun{ use_tibble() } } usethis/man/use_release_issue.Rd0000644000176200001440000000310214651000165016476 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/release.R \name{use_release_issue} \alias{use_release_issue} \title{Create a release checklist in a GitHub issue} \usage{ use_release_issue(version = NULL) } \arguments{ \item{version}{Optional version number for release. If unspecified, you can make an interactive choice.} } \description{ When preparing to release a package to CRAN there are quite a few steps that need to be performed, and some of the steps can take multiple hours. This function creates a checklist in a GitHub issue to: \itemize{ \item Help you keep track of where you are in the process \item Feel a sense of satisfaction as you progress towards final submission \item Help watchers of your package stay informed. } The checklist contains a generic set of steps that we've found to be helpful, based on the type of release ("patch", "minor", or "major"). You're encouraged to edit the issue to customize this list to meet your needs. \subsection{Customization}{ \itemize{ \item If you want to consistently add extra bullets for every release, you can include your own custom bullets by providing an (unexported) \code{release_bullets()} function that returns a character vector. (For historical reasons, \code{release_questions()} is also supported). \item If you want to check additional packages in the revdep check process, provide an (unexported) \code{release_extra_revdeps()} function that returns a character vector. This is currently only supported for Posit internal check tooling. 
} } } \examples{ \dontrun{ use_release_issue("2.0.0") } } usethis/man/git_sitrep.Rd0000644000176200001440000000132114651000165015144 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{git_sitrep} \alias{git_sitrep} \title{Git/GitHub sitrep} \usage{ git_sitrep(tool = c("git", "github"), scope = c("user", "project")) } \arguments{ \item{tool}{Report for \strong{git}, or \strong{github}} \item{scope}{Report globally for the current \strong{user}, or locally for the current \strong{project}} } \description{ Get a situation report on your current Git/GitHub status. Useful for diagnosing problems. The default is to report all values; provide values for \code{tool} or \code{scope} to be more specific. } \examples{ \dontrun{ # report all git_sitrep() # report git for current user git_sitrep("git", "user") } } usethis/DESCRIPTION0000644000176200001440000000433414721320322013444 0ustar liggesusersPackage: usethis Title: Automate Package and Project Setup Version: 3.1.0 Authors@R: c( person("Hadley", "Wickham", , "hadley@posit.co", role = "aut", comment = c(ORCID = "0000-0003-4757-117X")), person("Jennifer", "Bryan", , "jenny@posit.co", role = c("aut", "cre"), comment = c(ORCID = "0000-0002-6983-2759")), person("Malcolm", "Barrett", , "malcolmbarrett@gmail.com", role = "aut", comment = c(ORCID = "0000-0003-0299-5825")), person("Andy", "Teucher", , "andy.teucher@posit.co", role = "aut", comment = c(ORCID = "0000-0002-7840-692X")), person("Posit Software, PBC", role = c("cph", "fnd")) ) Description: Automate package and project setup tasks that are otherwise performed manually. This includes setting up unit testing, test coverage, continuous integration, Git, 'GitHub', licenses, 'Rcpp', 'RStudio' projects, and more. License: MIT + file LICENSE URL: https://usethis.r-lib.org, https://github.com/r-lib/usethis BugReports: https://github.com/r-lib/usethis/issues Depends: R (>= 3.6) Imports: cli (>= 3.0.1), clipr (>= 0.3.0), crayon, curl (>= 2.7), desc (>= 1.4.2), fs (>= 1.3.0), gert (>= 1.4.1), gh (>= 1.2.1), glue (>= 1.3.0), jsonlite, lifecycle (>= 1.0.0), purrr, rappdirs, rlang (>= 1.1.0), rprojroot (>= 1.2), rstudioapi, stats, tools, utils, whisker, withr (>= 2.3.0), yaml Suggests: covr, knitr, magick, pkgload (>= 1.3.2.1), rmarkdown, roxygen2 (>= 7.1.2), spelling (>= 1.2), styler (>= 1.2.0), testthat (>= 3.1.8) Config/Needs/website: r-lib/asciicast, tidyverse/tidytemplate, xml2 Config/testthat/edition: 3 Config/testthat/parallel: TRUE Config/testthat/start-first: github-actions, release Encoding: UTF-8 Language: en-US RoxygenNote: 7.3.2 NeedsCompilation: no Packaged: 2024-11-25 22:12:19 UTC; jenny Author: Hadley Wickham [aut] (), Jennifer Bryan [aut, cre] (), Malcolm Barrett [aut] (), Andy Teucher [aut] (), Posit Software, PBC [cph, fnd] Maintainer: Jennifer Bryan Repository: CRAN Date/Publication: 2024-11-26 10:20:02 UTC