remotes/0000755000176200001440000000000013622037662011737 5ustar liggesusersremotes/NAMESPACE0000644000176200001440000000612213621326547013161 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method(format,bioc_git2r_remote) S3method(format,bioc_xgit_remote) S3method(format,bitbucket_remote) S3method(format,cran_remote) S3method(format,git2r_remote) S3method(format,github_remote) S3method(format,gitlab_remote) S3method(format,local_remote) S3method(format,remotes) S3method(format,url_remote) S3method(format,xgit_remote) S3method(github_resolve_ref,"NULL") S3method(github_resolve_ref,default) S3method(github_resolve_ref,github_pull) S3method(github_resolve_ref,github_release) S3method(print,package_deps) S3method(remote_download,bioc_git2r_remote) S3method(remote_download,bioc_xgit_remote) S3method(remote_download,bitbucket_remote) S3method(remote_download,git2r_remote) S3method(remote_download,github_remote) S3method(remote_download,gitlab_remote) S3method(remote_download,local_remote) S3method(remote_download,svn_remote) S3method(remote_download,url_remote) S3method(remote_download,xgit_remote) S3method(remote_metadata,bioc_git2r_remote) S3method(remote_metadata,bioc_xgit_remote) S3method(remote_metadata,bitbucket_remote) S3method(remote_metadata,git2r_remote) S3method(remote_metadata,github_remote) S3method(remote_metadata,gitlab_remote) S3method(remote_metadata,local_remote) S3method(remote_metadata,svn_remote) S3method(remote_metadata,url_remote) S3method(remote_metadata,xgit_remote) S3method(remote_package_name,bioc_git2r_remote) S3method(remote_package_name,bioc_xgit_remote) S3method(remote_package_name,bitbucket_remote) S3method(remote_package_name,cran_remote) S3method(remote_package_name,git2r_remote) S3method(remote_package_name,github_remote) S3method(remote_package_name,gitlab_remote) S3method(remote_package_name,local_remote) S3method(remote_package_name,svn_remote) S3method(remote_package_name,url_remote) 
S3method(remote_package_name,xgit_remote) S3method(remote_sha,bioc_git2r_remote) S3method(remote_sha,bitbucket_remote) S3method(remote_sha,cran_remote) S3method(remote_sha,git2r_remote) S3method(remote_sha,github_remote) S3method(remote_sha,gitlab_remote) S3method(remote_sha,local_remote) S3method(remote_sha,svn_remote) S3method(remote_sha,url_remote) S3method(remote_sha,xgit_remote) S3method(update,package_deps) export(available_packages) export(available_packages_reset) export(available_packages_set) export(bioc_install_repos) export(bioc_version) export(dev_package_deps) export(download_version) export(git_credentials) export(github_pull) export(github_release) export(gitlab_pat) export(install_bioc) export(install_bitbucket) export(install_cran) export(install_deps) export(install_dev) export(install_git) export(install_github) export(install_gitlab) export(install_local) export(install_svn) export(install_url) export(install_version) export(local_package_deps) export(package_deps) export(parse_github_repo_spec) export(parse_github_url) export(parse_repo_spec) export(standardise_dep) export(update_packages) importFrom(stats,update) importFrom(tools,file_ext) importFrom(utils,available.packages) importFrom(utils,compareVersion) importFrom(utils,contrib.url) importFrom(utils,install.packages) importFrom(utils,read.delim) remotes/README.md0000644000176200001440000002211113621300305013175 0ustar liggesusers # remotes > Install R Packages from remote or local repositories, > including GitHub, GitLab, Bitbucket, and Bioconductor [![Project Status: Active - The project has reached a stable, usable state and is being actively developed.](http://www.repostatus.org/badges/latest/active.svg)](http://www.repostatus.org/#active) [![Linux Build Status](https://travis-ci.org/r-lib/remotes.svg?branch=master)](https://travis-ci.org/r-lib/remotes) [![Windows Build 
status](https://ci.appveyor.com/api/projects/status/github/r-lib/remotes?svg=true)](https://ci.appveyor.com/project/gaborcsardi/remotes) [![](https://www.r-pkg.org/badges/version/remotes)](https://www.r-pkg.org/pkg/remotes) [![CRAN RStudio mirror downloads](https://cranlogs.r-pkg.org/badges/remotes)](https://www.r-pkg.org/pkg/remotes) [![Coverage Status](https://img.shields.io/codecov/c/github/r-lib/remotes/master.svg)](https://codecov.io/github/r-lib/remotes?branch=master) Download and install R packages stored in GitHub, GitLab, Bitbucket, Bioconductor, or plain subversion or git repositories. This package is a lightweight replacement of the `install_*` functions in [`devtools`](https://github.com/r-lib/devtools). Indeed most of the code was copied over from `devtools`. ## Features * Installers: * Install packages with their dependencies. * Install from GitHub, GitLab, Bitbucket. * Install from git and subversion repositories. * Install from local files or URLs. * Install the dependencies of a local package tree. * Install specific package versions from CRAN. * Supports [Bioconductor](https://bioconductor.org/) packages. * Supports the `Remotes` field in `DESCRIPTION`. See more in the [dependencies](https://github.com/r-lib/remotes/blob/master/vignettes/dependencies.Rmd) vignette. * Supports the `Additional_repositories` field in `DESCRIPTION`. * Can install itself from GitHub (see below). * Does not depend on other R packages. * Does not contain compiled code, so no compiler is needed. * Does not need any external software (for most of the functionality at least). ## Installation Install the released version of remotes from CRAN: ```r install.packages("remotes") ``` You can also install remotes from GitHub. 
If you already have a previous version of remotes installed, you can use that to install the development version: ```r remotes::install_github("r-lib/remotes") ``` Alternatively, you can also call the supplied `install-github.R` file directly, from within R: ```r source("https://raw.githubusercontent.com/r-lib/remotes/master/install-github.R")$value("r-lib/remotes") ``` The service is also based on remotes. You can use it to install any R package from GitHub via sourcing a URL. E.g. to install remotes itself: ```r source("https://install-github.me/r-lib/remotes") ``` ## Usage Note that most of the examples here use GitHub. See below for other supported repository types. To install the latest version of a package in the `master` branch from GitHub, you can use the `user/repo` form. Note that `user` can also be an organization: ```r remotes::install_github("r-lib/conflicted") ``` If the R package is inside a subdirectory of the root directory, then give this subdirectory as well: ```r # build = FALSE because of some specificities of XGBoost package install_github("dmlc/xgboost/R-package", build = FALSE) ``` To install a certain branch or commit or tag, append it to the repo name, after an `@`: ```r remotes::install_github("gaborcsardi/pkgconfig@v2.0.0") ``` To install the latest release, append `@*release` to the repo name: ```r remotes::install_github("gaborcsardi/pkgconfig@*release") ``` To install a pull request, append `#` and the id (an integer number) of the pull request to the repo name: ```r remotes::install_github("r-lib/pkgconfig#7") ``` ### Dependencies Dependencies are automatically installed from CRAN. By default, outdated dependencies are automatically upgraded. In interactive sessions you can select a subset of the dependencies to upgrade. #### Dependencies on GitHub It is also possible to install dependencies from GitHub or other supported repositories. For this you need to add a `Remotes` field to the `DESCRIPTION` file. 
Its format is: ``` Remotes: [remote::]repo_spec, [remote::]repo_spec, ... ``` where `repo_spec` is any repository specification the corresponding `install_()` function can handle. If `remote::` is missing, `github::` is assumed. Other possible values: `gitlab::`,`bitbucket::`, `git::`, `local::`, `svn::`, `url::`, `version::`, `cran::`, `bioc::`. See more about the `Remotes` field in this [vignette](https://remotes.r-lib.org/articles/dependencies.html). #### Additional repositories remotes supports the `Additional_repositories` field in `DESCRIPTION`. This is a way to specify dependencies from non-CRAN package repositories. See the [Writing R extensions](https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Package-Dependencies) manual for details. #### Bioconductor packages Bioconductor packages are automatically detected and their dependencies are installed from Bioconductor. #### Currently supported remote types * GitHub repositories via `install_github`. * Bitbucket repositories via `install_bitbucket`. * Generic git repositories via `install_git`. They need either a system git installation, or the [git2r](https://github.com/ropensci/git2r) R package. * Local directories or package archive files via `install_local`. * Remote package archive files via `install_url`. * Packages in subversion repositories via `install_svn`. They need a system subversion installation. * Specific package versions from CRAN or other CRAN-like repositories via `install_version`. This includes outdated and archived packages as well. * All dependencies of a package in a local directory via `install_deps`. ### Download methods * For R older than 3.2, the curl package is required as remotes falls back to `curl::curl_download` in that case * For R newer than 3.3, default `download.file()` method is used. (`method = "auto"`) * For in between versions, * `method = "wininet"` is used on windows OS * `method = "libcurl"` is used on other OS, if available. 
See `help("download.file")` for information on these methods and for setting proxies if needed.
* The `R_REMOTES_UPGRADE` environment variable can be used to set a default preferred value for the `upgrade =` argument accepted by the various `install_*()` functions. For example, you can set `R_REMOTES_UPGRADE="always"` to upgrade dependent packages without asking the user. * Setting `R_REMOTES_STANDALONE="true"` forces remotes to work in standalone mode and avoid loading its optional dependencies (curl, git2 and pkgbuild currently. See "Standalone mode" above. * Setting `R_REMOTES_NO_ERRORS_FROM_WARNINGS="true"` avoids stopping the installation for warning messages. Warnings usually mean installation errors, so by default remotes stops for a warning. However, sometimes other warnings might happen, that could be ignored by setting this environment variable. * Setting `_R_CHECK_FORCE_SUGGESTS_="false"` while `R_REMOTES_NO_ERRORS_FROM_WARNINGS` is unset will also avoid stopping the installation for error messages. This is done because a warning is generated during installation when not all Suggested packages are not available. ## License GPL (>= 2) © Mango Solutions, RStudio remotes/man/0000755000176200001440000000000013621300305012474 5ustar liggesusersremotes/man/bioc_install_repos.Rd0000644000176200001440000000305713621303675016657 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bioc.R \name{bioc_version} \alias{bioc_version} \alias{bioc_install_repos} \title{Tools for Bioconductor repositories} \usage{ bioc_version(r_ver = getRversion()) bioc_install_repos(r_ver = getRversion(), bioc_ver = bioc_version(r_ver)) } \arguments{ \item{r_ver}{R version to use. For \code{bioc_install_repos()} it is ignored if \code{bioc_ver} is specified.} \item{bioc_ver}{Bioconductor version to use. Defaults to the default one corresponding to \code{r_ver}.} } \value{ \code{bioc_version()} returns a Bioconductor version, a \code{package_version} object. 
\code{bioc_install_repos()} returns a named character vector of the URLs of the Bioconductor repositories, appropriate for the current or the specified R version. } \description{ \code{bioc_version()} returns the Bioconductor version for the current or the specified R version. } \details{ \code{bioc_install_repos()} deduces the URLs of the Bioconductor repositories. Both functions observe the \code{R_BIOC_VERSION} environment variable, which can be set to force a Bioconductor version. If this is set, then the \code{r_ver} and \code{bioc_ver} arguments are ignored. \code{bioc_install_repos()} observes the \code{R_BIOC_MIRROR} environment variable and also the \code{BioC_mirror} option, which can be set to the desired Bioconductor mirror. The option takes precedence if both are set. Its default value is \verb{https://bioconductor.org}. } \examples{ bioc_version() bioc_version("3.4") bioc_install_repos() } \keyword{internal} remotes/man/install_git.Rd0000644000176200001440000000730113621303675015312 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-git.R \name{install_git} \alias{install_git} \title{Install a package from a git repository} \usage{ install_git( url, subdir = NULL, ref = NULL, branch = NULL, credentials = git_credentials(), git = c("auto", "git2r", "external"), dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{url}{Location of package. 
The url should point to a public or private repository.} \item{subdir}{A sub-directory within a git repository that may contain the package we are interested in installing.} \item{ref}{Name of branch, tag or SHA reference to use, if not HEAD.} \item{branch}{Deprecated, synonym for ref.} \item{credentials}{A git2r credentials object passed through to clone. Supplying this argument implies using \code{git2r} with \code{git}.} \item{git}{Whether to use the \code{git2r} package, or an external git client via system. Default is \code{git2r} if it is installed, otherwise an external git installation.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). 
is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ It is vectorised so you can install multiple packages with a single command. You do not need to have the \code{git2r} package, or an external git client installed. } \details{ If you need to set git credentials for use in the \code{Remotes} field you can do so by placing the credentials in the \code{remotes.git_credentials} global option. } \examples{ \dontrun{ install_git("git://github.com/hadley/stringr.git") install_git("git://github.com/hadley/stringr.git", ref = "stringr-0.2") } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/install_bitbucket.Rd0000644000176200001440000001172413621303675016507 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-bitbucket.R \name{install_bitbucket} \alias{install_bitbucket} \title{Install a package directly from Bitbucket} \usage{ install_bitbucket( repo, ref = "master", subdir = NULL, auth_user = bitbucket_user(), password = bitbucket_password(), host = "api.bitbucket.org/2.0", dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{repo}{Repository address in the format \verb{username/repo[/subdir][@ref|#pull]}. 
Alternatively, you can specify \code{subdir} and/or \code{ref} using the respective parameters (see below); if both is specified, the values in \code{repo} take precedence.} \item{ref}{Desired git reference; could be a commit, tag, or branch name. Defaults to master.} \item{subdir}{subdirectory within repo that contains the R package.} \item{auth_user}{your account username if you're attempting to install a package hosted in a private repository (and your username is different to \code{username}). Defaults to the \code{BITBUCKET_USER} environment variable.} \item{password}{your password. Defaults to the \code{BITBUCKET_PASSWORD} environment variable. See details for further information on setting up a password.} \item{host}{GitHub API host to use. Override with your GitHub enterprise hostname, for example, \code{"github.hostname.com/api/v3"}.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". 
\code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function is vectorised so you can install multiple packages in a single command. } \details{ To install from a private repo, or more generally, access the Bitbucket API with your own credentials, you will need to get an access token. You can create an access token following the instructions found in the \href{https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html}{Bitbucket App Passwords documentation}. The App Password requires read-only access to your repositories and pull requests. Then store your password in the environment variable \code{BITBUCKET_PASSWORD} (e.g. \code{evelynwaugh:swordofhonour}) Note that on Windows, authentication requires the "libcurl" download method. You can set the default download method via the \code{download.file.method} option:\preformatted{options(download.file.method = "libcurl") } In particular, if unset, RStudio sets the download method to "wininet". To override this, you might want to set it to "libcurl" in your R profile, see \link[base:Startup]{base::Startup}. 
The caveat of the "libcurl" method is that it does \emph{not} set the system proxies automatically, see "Setting Proxies" in \code{\link[utils:download.file]{utils::download.file()}}. } \examples{ \dontrun{ install_bitbucket("sulab/mygene.r@default") install_bitbucket("djnavarro/lsr") } } \seealso{ Bitbucket API docs: \url{https://confluence.atlassian.com/bitbucket/use-the-bitbucket-cloud-rest-apis-222724129.html} Other package installation: \code{\link{install_bioc}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/install_deps.Rd0000644000176200001440000000420713621303675015464 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install.R \name{install_deps} \alias{install_deps} \title{Install package dependencies if needed.} \usage{ install_deps( pkgdir = ".", dependencies = NA, repos = getOption("repos"), type = getOption("pkgType"), upgrade = c("default", "ask", "always", "never"), quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, ... ) } \arguments{ \item{pkgdir}{path to a package directory, or to a package tarball.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. 
just check this package, not its dependencies).} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{...}{additional arguments passed to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ Install package dependencies if needed. 
} \examples{ \dontrun{install_deps(".")} } remotes/man/package_deps.Rd0000644000176200001440000000663513621303675015420 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/deps.R \name{package_deps} \alias{package_deps} \alias{local_package_deps} \alias{dev_package_deps} \alias{update.package_deps} \title{Find all dependencies of a CRAN or dev package.} \usage{ package_deps( packages, dependencies = NA, repos = getOption("repos"), type = getOption("pkgType") ) local_package_deps(pkgdir = ".", dependencies = NA) dev_package_deps( pkgdir = ".", dependencies = NA, repos = getOption("repos"), type = getOption("pkgType") ) \method{update}{package_deps}( object, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{packages}{A character vector of package names.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{pkgdir}{path to a package directory, or to a package tarball.} \item{object}{A \code{package_deps} object.} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. 
For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{...}{Additional arguments passed to \code{install_packages}.} } \value{ A \code{data.frame} with columns: \tabular{ll}{ \code{package} \tab The dependent package's name,\cr \code{installed} \tab The currently installed version,\cr \code{available} \tab The version available on CRAN,\cr \code{diff} \tab An integer denoting whether the locally installed version of the package is newer (1), the same (0) or older (-1) than the version currently available on CRAN.\cr } } \description{ Find all the dependencies of a package and determine whether they are ahead or behind CRAN. A \code{print()} method identifies mismatches (if any) between local and CRAN versions of each dependent package; an \code{update()} method installs outdated or missing packages from CRAN. 
} \examples{ \dontrun{ package_deps("devtools") # Use update to update any out-of-date dependencies update(package_deps("devtools")) } } remotes/man/update_packages.Rd0000644000176200001440000000505313621303675016123 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/deps.R \name{update_packages} \alias{update_packages} \title{Update packages that are missing or out-of-date.} \usage{ update_packages( packages = TRUE, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{packages}{Character vector of packages to update.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". 
\code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ Works similarly to \code{\link[utils:install.packages]{utils::install.packages()}} but doesn't install packages that are already installed, and also upgrades out dated dependencies. } \examples{ \dontrun{ update_packages("ggplot2") update_packages(c("plyr", "ggplot2")) } } \seealso{ \code{\link[=package_deps]{package_deps()}} to see which packages are out of date/ missing. } remotes/man/install_url.Rd0000644000176200001440000000553513621303675015340 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-url.R \name{install_url} \alias{install_url} \title{Install a package from a url} \usage{ install_url( url, subdir = NULL, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{url}{location of package on internet. 
The url should point to a zip file, a tar file or a bzipped/gzipped tar file.} \item{subdir}{subdirectory within url bundle that contains the R package.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function is vectorised so you can install multiple packages in a single command. 
} \examples{ \dontrun{ install_url("https://github.com/hadley/stringr/archive/master.zip") } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/install_github.Rd0000644000176200001440000001036013621303675016010 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-github.R \name{install_github} \alias{install_github} \title{Attempts to install a package directly from GitHub.} \usage{ install_github( repo, ref = "master", subdir = NULL, auth_token = github_pat(quiet), host = "api.github.com", dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{repo}{Repository address in the format \verb{username/repo[/subdir][@ref|#pull]}. Alternatively, you can specify \code{subdir} and/or \code{ref} using the respective parameters (see below); if both is specified, the values in \code{repo} take precedence.} \item{ref}{Desired git reference. Could be a commit, tag, or branch name, or a call to \code{\link[=github_pull]{github_pull()}}. Defaults to \code{"master"}.} \item{subdir}{subdirectory within repo that contains the R package.} \item{auth_token}{To install from a private repo, generate a personal access token (PAT) in "https://github.com/settings/tokens" and supply to this argument. This is safer than using a password because you can easily delete a PAT without affecting any others. 
Defaults to the \code{GITHUB_PAT} environment variable.} \item{host}{GitHub API host to use. Override with your GitHub enterprise hostname, for example, \code{"github.hostname.com/api/v3"}.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function is vectorised on \code{repo} so you can install multiple packages in a single command. } \details{ If the repository uses submodules a command-line git client is required to clone the submodules. 
} \examples{ \dontrun{ install_github("klutometis/roxygen") install_github("wch/ggplot2") install_github(c("rstudio/httpuv", "rstudio/shiny")) install_github(c("hadley/httr@v0.4", "klutometis/roxygen#142", "mfrasca/r-logging/pkg")) # To install from a private repo, use auth_token with a token # from https://github.com/settings/tokens. You only need the # repo scope. Best practice is to save your PAT in env var called # GITHUB_PAT. install_github("hadley/private", auth_token = "abc") } } \seealso{ \code{\link[=github_pull]{github_pull()}} Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/available_packages.Rd0000644000176200001440000000225013621303675016555 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/cran.R \name{available_packages_set} \alias{available_packages_set} \alias{available_packages_reset} \alias{available_packages} \title{Simpler available.packages} \usage{ available_packages_set(repos, type, db) available_packages_reset() available_packages(repos = getOption("repos"), type = getOption("pkgType")) } \arguments{ \item{repos}{ character vector, the base URL(s) of the repositories to use. } \item{type}{ character string, indicate which type of packages: see \code{\link[utils]{install.packages}}. If \code{type = "both"} this will use the source repository. } } \description{ This is mostly equivalent to \code{\link[utils:available.packages]{utils::available.packages()}} however it also caches the full result. 
Additionally the cache can be assigned explicitly with \code{\link[=available_packages_set]{available_packages_set()}} and reset (cleared) with \code{\link[=available_packages_reset]{available_packages_reset()}}. } \seealso{ \code{\link[utils:available.packages]{utils::available.packages()}} for full documentation on the output format. } \keyword{internal} remotes/man/install_dev.Rd0000644000176200001440000000266713621303675015317 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-dev.R \name{install_dev} \alias{install_dev} \title{Install the development version of a package} \usage{ install_dev(package, cran_url = getOption("repos")[["CRAN"]], ...) } \arguments{ \item{package}{The package name to install.} \item{cran_url}{The URL of the CRAN mirror to use, by default based on the 'repos' option. If unset uses 'https://cloud.r-project.org'.} \item{...}{Additional arguments passed to \code{\link[=install_github]{install_github()}}, \code{\link[=install_gitlab]{install_gitlab()}}, or \code{\link[=install_bitbucket]{install_bitbucket()}} functions.} } \description{ \code{install_dev()} retrieves the package DESCRIPTION from the CRAN mirror and looks in the 'URL' and 'BugReports' fields for GitHub, GitLab or Bitbucket URLs. It then calls the appropriate \code{install_()} function to install the development package. 
} \examples{ \dontrun{ # From GitHub install_dev("dplyr") # From GitLab install_dev("iemiscdata") # From Bitbucket install_dev("argparser") } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/install_local.Rd0000644000176200001440000000563013621303675015624 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-local.R \name{install_local} \alias{install_local} \title{Install a package from a local file} \usage{ install_local( path = ".", subdir = NULL, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = !is_binary_pkg(path), build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{path}{path to local directory, or compressed file (tar, zip, tar.gz tar.bz2, tgz2 or tbz)} \item{subdir}{subdirectory within url bundle that contains the R package.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. 
"ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function is vectorised so you can install multiple packages in a single command. 
} \examples{ \dontrun{ dir <- tempfile() dir.create(dir) pkg <- download.packages("testthat", dir, type = "source") install_local(pkg[, 2]) } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/install_svn.Rd0000644000176200001440000000626613621303675015346 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-svn.R \name{install_svn} \alias{install_svn} \title{Install a package from a SVN repository} \usage{ install_svn( url, subdir = NULL, args = character(0), revision = NULL, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{url}{Location of package. The url should point to a public or private repository.} \item{subdir}{A sub-directory within a svn repository that contains the package we are interested in installing.} \item{args}{A character vector providing extra options to pass on to \command{svn}.} \item{revision}{svn revision, if omitted updates to latest} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. 
just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function requires \command{svn} to be installed on your system in order to be used. } \details{ It is vectorised so you can install multiple packages with a single command. 
} \examples{ \dontrun{ install_svn("https://github.com/hadley/stringr/trunk") install_svn("https://github.com/hadley/httr/branches/oauth") } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/download.Rd0000644000176200001440000000341313621303675014610 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/download.R \name{download} \alias{download} \title{Download a file} \usage{ download( path, url, auth_token = NULL, basic_auth = NULL, quiet = TRUE, headers = NULL ) } \arguments{ \item{path}{Path to download to. \code{dirname(path)} must exist.} \item{url}{URL.} \item{auth_token}{Token for token-based authentication or \code{NULL}.} \item{basic_auth}{List with \code{user} and \code{password} for basic HTTP authentication, or \code{NULL}.} \item{quiet}{Passed to \code{\link[=download.file]{download.file()}} or \code{\link[curl:curl_download]{curl::curl_download()}}.} \item{headers}{Named character vector of HTTP headers to use.} } \value{ \code{path}, if the download was successful. } \description{ Uses either the curl package for R versions older than 3.2.0, otherwise a wrapper around \code{\link[=download.file]{download.file()}}. } \details{ We respect the \code{download.file.method} setting of the user. If it is not set, then see \code{download_method()} for choosing a method. Authentication can be supplied three ways: \itemize{ \item By setting \code{auth_token}. This will append an HTTP \code{Authorization} header: \verb{Authorization: token \{auth_token\}}. \item By setting \code{basic_auth} to a list with elements \code{user} and \code{password}. 
This will append a proper \verb{Authorization: Basic \{encoded_password\}} HTTP header. \item By specifying the proper \code{headers} directly. } If both \code{auth_token} and \code{basic_auth} are specified, that's an error. \code{auth_token} and \code{basic_auth} are \emph{appended} to \code{headers}, so they take precedence over an \code{Authorization} header that is specified directly in \code{headers}. } \keyword{internal} remotes/man/install_cran.Rd0000644000176200001440000000532113621303675015452 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-cran.R \name{install_cran} \alias{install_cran} \title{Attempts to install a package from CRAN.} \usage{ install_cran( pkgs, repos = getOption("repos"), type = getOption("pkgType"), dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, ... ) } \arguments{ \item{pkgs}{Character vector of packages to install.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". 
\code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function is vectorised on \code{pkgs} so you can install multiple packages in a single command. } \examples{ \dontrun{ install_cran("ggplot2") install_cran(c("httpuv", "shiny")) } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/standardise_dep.Rd0000644000176200001440000000155413361106113016123 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/deps.R \name{standardise_dep} \alias{standardise_dep} \title{Standardise dependencies using the same logical as \link{install.packages}} \usage{ standardise_dep(x) } \arguments{ \item{x}{The dependencies to standardise. A character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. 
\code{FALSE} is shorthand for no dependencies.} } \description{ Standardise dependencies using the same logical as \link{install.packages} } \seealso{ \url{http://r-pkgs.had.co.nz/description.html#dependencies} for additional information on what each dependency type means. } \keyword{internal} remotes/man/gitlab_pat.Rd0000644000176200001440000000047313361106113015077 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-gitlab.R \name{gitlab_pat} \alias{gitlab_pat} \title{Retrieve GitLab personal access token.} \usage{ gitlab_pat(quiet = TRUE) } \description{ A GitLab personal access token Looks in env var \code{GITLAB_PAT} } \keyword{internal} remotes/man/download_version.Rd0000644000176200001440000000275413621303675016364 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-version.R \name{download_version} \alias{download_version} \title{Download a specified version of a CRAN package} \usage{ download_version( package, version = NULL, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{package}{package name} \item{version}{If the specified version is NULL or the same as the most recent version of the package, this function simply calls \code{\link[utils:install.packages]{utils::install.packages()}}. Otherwise, it looks at the list of archived source tarballs and tries to install an older version instead.} \item{repos}{ character vector, the base URL(s) of the repositories to use, e.g., the URL of a CRAN mirror such as \code{"https://cloud.r-project.org"}. For more details on supported URL schemes see \code{\link{url}}. Can be \code{NULL} to install from local files, directories or URLs: this will be inferred by extension from \code{pkgs} if of length one. } \item{type}{character, indicating the type of package to download and install. 
Will be \code{"source"} except on Windows and some macOS builds: see the section on \sQuote{Binary packages} for those. } \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \value{ Name of the downloaded file. } \description{ It downloads the package to a temporary file, and returns the name of the file. } remotes/man/install_gitlab.Rd0000644000176200001440000000670613621303675016001 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-gitlab.R \name{install_gitlab} \alias{install_gitlab} \title{Install a package from GitLab} \usage{ install_gitlab( repo, subdir = NULL, auth_token = gitlab_pat(quiet), host = "gitlab.com", dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{repo}{Repository address in the format \verb{username/repo[@ref]}.} \item{subdir}{subdirectory within repo that contains the R package.} \item{auth_token}{To install from a private repo, generate a personal access token (PAT) in \url{https://gitlab.com/profile/personal_access_tokens} and supply to this argument. This is safer than using a password because you can easily delete a PAT without affecting any others. Defaults to the GITLAB_PAT environment variable.} \item{host}{GitLab API host to use. Override with your GitLab enterprise hostname, for example, \code{"gitlab.hostname.com"}.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. 
\code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function is vectorised on \code{repo} so you can install multiple packages in a single command. Like other remotes the repository will skip installation if \code{force == FALSE} (the default) and the remote state has not changed since the previous installation. 
} \examples{ \dontrun{ install_gitlab("jimhester/covr") } } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/install_version.Rd0000644000176200001440000001021113621303675016206 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-version.R \name{install_version} \alias{install_version} \title{Install specified version of a CRAN package.} \usage{ install_version( package, version = NULL, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = FALSE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = "source", ... ) } \arguments{ \item{package}{package name} \item{version}{If the specified version is NULL or the same as the most recent version of the package, this function simply calls \code{\link[utils:install.packages]{utils::install.packages()}}. Otherwise, it looks at the list of archived source tarballs and tries to install an older version instead.} \item{dependencies}{logical indicating whether to also install uninstalled packages which these packages depend on/link to/import/suggest (and so on recursively). Not used if \code{repos = NULL}. Can also be a character vector, a subset of \code{c("Depends", "Imports", "LinkingTo", "Suggests", "Enhances")}. Only supported if \code{lib} is of length one (or missing), so it is unambiguous where to install the dependent packages. If this is not the case it is ignored, with a warning. The default, \code{NA}, means \code{c("Depends", "Imports", "LinkingTo")}. 
\code{TRUE} means to use \code{c("Depends", "Imports", "LinkingTo", "Suggests")} for \code{pkgs} and \code{c("Depends", "Imports", "LinkingTo")} for added dependencies: this installs all the packages needed to run \code{pkgs}, their examples, tests and vignettes (if the package author specified them correctly). In all of these, \code{"LinkingTo"} is omitted for binary packages. } \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{ logical: if true, reduce the amount of output. } \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). is \code{TRUE}.} \item{repos}{ character vector, the base URL(s) of the repositories to use, e.g., the URL of a CRAN mirror such as \code{"https://cloud.r-project.org"}. For more details on supported URL schemes see \code{\link{url}}. Can be \code{NULL} to install from local files, directories or URLs: this will be inferred by extension from \code{pkgs} if of length one. } \item{type}{character, indicating the type of package to download and install. Will be \code{"source"} except on Windows and some macOS builds: see the section on \sQuote{Binary packages} for those. 
} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ If you are installing an package that contains compiled code, you will need to have an R development environment installed. You can check if you do by running \code{devtools::has_devel} (you need the \code{devtools} package for this). } \seealso{ Other package installation: \code{\link{install_bioc}()}, \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()} } \author{ Jeremy Stephens } \concept{package installation} remotes/man/git_credentials.Rd0000644000176200001440000000050613503155613016135 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-git.R \name{git_credentials} \alias{git_credentials} \title{Specify git credentials to use} \usage{ git_credentials() } \description{ The global option \code{remotes.git_credentials} is used to set the git credentials. } \keyword{internal} remotes/man/parse-git-repo.Rd0000644000176200001440000000354213621303675015642 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/parse-git.R \name{parse-git-repo} \alias{parse-git-repo} \alias{parse_repo_spec} \alias{parse_github_repo_spec} \alias{parse_github_url} \title{Parse a remote git repo specification} \usage{ parse_repo_spec(repo) parse_github_repo_spec(repo) parse_github_url(repo) } \arguments{ \item{repo}{Character scalar, the repo specification.} } \value{ List with members: \code{username}, \code{repo}, \code{subdir} \code{ref}, \code{pull}, \code{release}, some which will be empty. 
} \description{ A remote repo can be specified in two ways: \describe{ \item{as a URL}{\code{parse_github_url()} handles HTTPS and SSH remote URLs and various GitHub browser URLs} \item{via a shorthand}{\code{parse_repo_spec()} handles this concise form: \verb{[username/]repo[/subdir][#pull|@ref|@*release]}} } } \examples{ parse_repo_spec("metacran/crandb") parse_repo_spec("jimhester/covr#47") ## pull request parse_repo_spec("jeroen/curl@v0.9.3") ## specific tag parse_repo_spec("tidyverse/dplyr@*release") ## shorthand for latest release parse_repo_spec("r-lib/remotes@550a3c7d3f9e1493a2ba") ## commit SHA parse_repo_spec("igraph=igraph/rigraph") ## Different package name from repo name parse_github_url("https://github.com/jeroen/curl.git") parse_github_url("git@github.com:metacran/crandb.git") parse_github_url("https://github.com/jimhester/covr") parse_github_url("https://github.example.com/user/repo.git") parse_github_url("git@github.example.com:user/repo.git") parse_github_url("https://github.com/r-lib/remotes/pull/108") parse_github_url("https://github.com/r-lib/remotes/tree/name-of-branch") parse_github_url("https://github.com/r-lib/remotes/commit/1234567") parse_github_url("https://github.com/r-lib/remotes/releases/latest") parse_github_url("https://github.com/r-lib/remotes/releases/tag/1.0.0") } remotes/man/install_bioc.Rd0000644000176200001440000000742413621303675015451 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-bioc.R \name{install_bioc} \alias{install_bioc} \title{Install a development package from the Bioconductor git repository} \usage{ install_bioc( repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")), git = c("auto", "git2r", "external"), dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = 
FALSE, repos = getOption("repos"), type = getOption("pkgType"), ... ) } \arguments{ \item{repo}{Repository address in the format \verb{[username:password@][release/]repo[#commit]}. Valid values for the release are \sQuote{devel}, \sQuote{release} (the default if none specified), or numeric release numbers (e.g. \sQuote{3.3}).} \item{mirror}{The Bioconductor git mirror to use} \item{git}{Whether to use the \code{git2r} package, or an external git client via system. Default is \code{git2r} if it is installed, otherwise an external git installation.} \item{dependencies}{Which dependencies do you want to check? Can be a character vector (selecting from "Depends", "Imports", "LinkingTo", "Suggests", or "Enhances"), or a logical vector. \code{TRUE} is shorthand for "Depends", "Imports", "LinkingTo" and "Suggests". \code{NA} is shorthand for "Depends", "Imports" and "LinkingTo" and is the default. \code{FALSE} is shorthand for no dependencies (i.e. just check this package, not its dependencies).} \item{upgrade}{One of "default", "ask", "always", or "never". "default" respects the value of the \code{R_REMOTES_UPGRADE} environment variable if set, and falls back to "ask" if unset. "ask" prompts the user for which out of date packages to upgrade. For non-interactive sessions "ask" is equivalent to "always". \code{TRUE} and \code{FALSE} are also accepted and correspond to "always" and "never" respectively.} \item{force}{Force installation, even if the remote state has not changed since the previous install.} \item{quiet}{If \code{TRUE}, suppress output.} \item{build}{If \code{TRUE} build the package before installing.} \item{build_opts}{Options to pass to \verb{R CMD build}, only used when \code{build}} \item{build_manual}{If \code{FALSE}, don't build PDF manual ('--no-manual').} \item{build_vignettes}{If \code{FALSE}, don't build package vignettes ('--no-build-vignettes'). 
is \code{TRUE}.} \item{repos}{A character vector giving repositories to use.} \item{type}{Type of package to \code{update}.} \item{...}{Other arguments passed on to \code{\link[utils:install.packages]{utils::install.packages()}}.} } \description{ This function requires \code{git} to be installed on your system in order to be used. } \details{ It is vectorised so you can install multiple packages with a single command. This is intended as an aid for Bioconductor developers. If you want to install the release version of a Bioconductor package one can use the \code{BiocManager} package. } \examples{ \dontrun{ install_bioc("SummarizedExperiment") install_bioc("release/SummarizedExperiment") install_bioc("3.3/SummarizedExperiment") install_bioc("SummarizedExperiment#abc123") install_bioc("user:password@release/SummarizedExperiment") install_bioc("user:password@devel/SummarizedExperiment") install_bioc("user:password@SummarizedExperiment#abc123") } } \seealso{ Other package installation: \code{\link{install_bitbucket}()}, \code{\link{install_cran}()}, \code{\link{install_dev}()}, \code{\link{install_github}()}, \code{\link{install_gitlab}()}, \code{\link{install_git}()}, \code{\link{install_local}()}, \code{\link{install_svn}()}, \code{\link{install_url}()}, \code{\link{install_version}()} } \concept{package installation} remotes/man/github_refs.Rd0000644000176200001440000000077113361106113015273 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/install-github.R \name{github_pull} \alias{github_pull} \alias{github_release} \title{GitHub references} \usage{ github_pull(pull) github_release() } \arguments{ \item{pull}{The pull request to install} } \description{ Use as \code{ref} parameter to \code{\link[=install_github]{install_github()}}. Allows installing a specific pull request or the latest release. 
} \seealso{ \code{\link[=install_github]{install_github()}} } remotes/DESCRIPTION0000644000176200001440000000324413622037662013450 0ustar liggesusersPackage: remotes Title: R Package Installation from Remote Repositories, Including 'GitHub' Version: 2.1.1 Authors@R: c( person("Jim", "Hester", , "jim.hester@rstudio.com", role = c("aut", "cre")), person("Gábor", "Csárdi", , "csardi.gabor@gmail.com", role = c("aut")), person("Hadley", "Wickham", role = c("aut")), person("Winston", "Chang", role = "aut"), person("RStudio", role = "cph"), person("Martin", "Morgan", role = "aut"), person("Dan", "Tenenbaum", role = "aut"), person("Mango Solutions", role = "cph") ) Description: Download and install R packages stored in 'GitHub', 'GitLab', 'Bitbucket', 'Bioconductor', or plain 'subversion' or 'git' repositories. This package provides the 'install_*' functions in 'devtools'. Indeed most of the code was copied over from 'devtools'. License: GPL (>= 2) URL: https://remotes.r-lib.org, https://github.com/r-lib/remotes#readme BugReports: https://github.com/r-lib/remotes/issues Imports: methods, stats, tools, utils Suggests: brew, callr, codetools, curl, covr, git2r (>= 0.23.0), knitr, mockery, pkgbuild (>= 1.0.1), pingr, rmarkdown, rprojroot, testthat, withr Depends: R (>= 3.0.0) VignetteBuilder: knitr RoxygenNote: 7.0.2 SystemRequirements: Subversion for install_svn, git for install_git Encoding: UTF-8 NeedsCompilation: no Packaged: 2020-02-13 20:36:09 UTC; jhester Author: Jim Hester [aut, cre], Gábor Csárdi [aut], Hadley Wickham [aut], Winston Chang [aut], RStudio [cph], Martin Morgan [aut], Dan Tenenbaum [aut], Mango Solutions [cph] Maintainer: Jim Hester Repository: CRAN Date/Publication: 2020-02-15 19:00:02 UTC remotes/build/0000755000176200001440000000000013621331470013027 5ustar liggesusersremotes/build/vignette.rds0000644000176200001440000000034013621331470015363 0ustar liggesusersmOK 0MZ? (^ '(nHq64Ӥ$QqĶb3ͼ "EK s49$Yr0YQ`̻h0J-W&iy . 
T]|;<ŻnΒk> )~gvUvv'Yq-"6E~XkizwB&:al~+remotes/tests/0000755000176200001440000000000013243326354013077 5ustar liggesusersremotes/tests/testthat/0000755000176200001440000000000013622037662014741 5ustar liggesusersremotes/tests/testthat/withremotes/0000755000176200001440000000000013476462075017322 5ustar liggesusersremotes/tests/testthat/withremotes/DESCRIPTION0000644000176200001440000000037313476462075021033 0ustar liggesusersPackage: noremotes Title: Tools to make developing R code easier License: MIT Description: Package description. Author: Bugs Bunny Maintainer: Bugs Bunny Version: 1.0.0 Suggests: testthat Imports: falsy Remotes: gaborcsardi/falsy remotes/tests/testthat/MASS/0000755000176200001440000000000013476462075015513 5ustar liggesusersremotes/tests/testthat/MASS/DESCRIPTION0000644000176200001440000000255013476462075017223 0ustar liggesusersPackage: MASS Priority: recommended Version: 7.3-51 Date: 2018-10-16 Revision: $Rev: 3490 $ Depends: R (>= 3.1.0), grDevices, graphics, stats, utils Imports: methods Suggests: lattice, nlme, nnet, survival Authors@R: c(person("Brian", "Ripley", role = c("aut", "cre", "cph"), email = "ripley@stats.ox.ac.uk"), person("Bill", "Venables", role = "ctb"), person(c("Douglas", "M."), "Bates", role = "ctb"), person("Kurt", "Hornik", role = "trl", comment = "partial port ca 1998"), person("Albrecht", "Gebhardt", role = "trl", comment = "partial port ca 1998"), person("David", "Firth", role = "ctb")) Description: Functions and datasets to support Venables and Ripley, "Modern Applied Statistics with S" (4th edition, 2002). Title: Support Functions and Datasets for Venables and Ripley's MASS LazyData: yes ByteCompile: yes License: GPL-2 | GPL-3 URL: http://www.stats.ox.ac.uk/pub/MASS4/ Contact: NeedsCompilation: yes Packaged: 2018-10-16 10:15:23 UTC; ripley Author: Brian Ripley [aut, cre, cph], Bill Venables [ctb], Douglas M. 
Bates [ctb], Kurt Hornik [trl] (partial port ca 1998), Albrecht Gebhardt [trl] (partial port ca 1998), David Firth [ctb] Maintainer: Brian Ripley Repository: CRAN Date/Publication: 2018-10-16 10:18:19 UTC remotes/tests/testthat/test-install-url.R0000644000176200001440000000150013352235013020271 0ustar liggesusers context("Installing from URLs") test_that("install_url", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/mangothecat/simplegraph/archive/master.zip" install_url(url, lib = lib, quiet = TRUE) expect_silent(packageDescription("simplegraph", lib.loc = lib)) expect_equal( packageDescription("simplegraph", lib.loc = lib)$RemoteType, "url") expect_equal( trim_ws(packageDescription("simplegraph", lib.loc = lib)$RemoteUrl), url) remote <- package2remote("simplegraph", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "url_remote") expect_equal(format(remote), "URL") expect_equal(remote$url, url) expect_equal(remote$subdir, NULL) }) remotes/tests/testthat/test-install-github.R0000644000176200001440000001132113503162605020757 0ustar liggesusers context("Install from GitHub") test_that("github_resolve_ref.github_release", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() expect_error( github_resolve_ref.github_release( NA, list(username = "hadley", repo = "devtools"), host = "api.github.com" ), NA ) }) test_that("github_resolve_ref.NULL", { expect_equal( github_resolve_ref(NULL, list()), list(ref = "master") ) }) test_that("github_resolve_ref.github_pull", { expect_error( github_resolve_ref( github_pull("1"), list(userame = "gaborcsardi", repo = "pkgconfig") ), "Cannot find GitHub pull request" ) }) test_that("github_resolve_ref.github_release", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() expect_error( github_resolve_ref( github_release(), list(userame = "gaborcsardi", repo = "xxxxxxxxxx") ), "Cannot 
find repo" ) }) test_that("github_release", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_github( "gaborcsardi/falsy", ref = github_release(), lib = lib, quiet = TRUE ) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal( packageDescription("falsy", lib.loc = lib)$RemoteRepo, "falsy") }) test_that("install_github", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_github("cran/falsy", lib = lib, quiet = TRUE) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal( packageDescription("falsy", lib.loc = lib)$RemoteRepo, "falsy") remote <- package2remote("falsy", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "github_remote") expect_equal(format(remote), "GitHub") expect_equal(remote$host, "api.github.com") expect_equal(remote$username, "cran") expect_equal(remote$repo, "falsy") expect_equal(remote$ref, "master") expect_equal(remote$subdir, NULL) expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("error if not username, warning if given as argument", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) expect_error( install_github("falsy", lib = lib, quiet = TRUE), "Invalid git repo specification" ) }) test_that("remote_download.github_remote messages", { mockery::stub(remote_download.github_remote, "download", TRUE) expect_message( remote_download.github_remote( list( host = "api.github.com", username = "cran", repo = "falsy", ref = "master" ) ), "Downloading GitHub repo" ) }) test_that("remote_metadata.github_remote", { expect_equal( remote_metadata.github_remote(list(), sha = "foobar")$RemoteSha, "foobar" ) }) 
test_that("remote_sha.github_remote", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() expect_equal( remote_sha.github_remote( list( username = "cran", repo = "falsy", ref = "1.0", host = "api.github.com" ) ), "0f39d9eb735bf16909831c0bb129063dda388375" ) }) test_that("github_remote with deleted branch", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() # skip this test unless we are using curl skip_if(is_standalone() || !pkg_installed("curl")) expect_equal( remote_sha.github_remote( list( username = "tidyverse", repo = "purrr", ref = "rc-0.3.1", host = "api.github.com" ) ), NA_character_ ) }) test_that("github_pull", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_github( "r-lib/desc", ref = github_pull(64), lib = lib, quiet = TRUE ) expect_silent(packageDescription("desc", lib.loc = lib)) expect_equal( packageDescription("desc", lib.loc = lib)$RemoteRepo, "desc") }) test_that("remote_sha.github_remote errors if remote doesn't exist", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() expect_error(remote_sha(github_remote("arst/arst"))) }) test_that("remote_sha.github_remote returns expected value if remote does exist", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() expect_equal(remote_sha(github_remote("r-lib/devtools@v1.8.0")), "ad9aac7b9a522354e1ff363a86f389e32cec181b") }) remotes/tests/testthat/test-download.R0000644000176200001440000002144713621266613017657 0ustar liggesusers context("Download") test_that("download_method", { mockery::stub(download_method, "get_r_version", "3.3.0") mockery::stub(download_method, "has_curl", FALSE) with_options(list(download.file.method = NULL), expect_equal(download_method(), "auto")) mockery::stub(download_method, "get_r_version", "3.2.5") mockery::stub(download_method, "os_type", "windows") with_options(list(download.file.method = NULL), 
expect_equal(download_method(), "wininet")) mockery::stub(download_method, "get_r_version", "3.2.5") mockery::stub(download_method, "os_type", "unix") mockery::stub(download_method, "has_curl", TRUE) with_options(list(download.file.method = NULL), expect_equal(download_method(), "libcurl")) mockery::stub(download_method, "get_r_version", "3.2.5") mockery::stub(download_method, "os_type", "unix") mockery::stub(download_method, "has_curl", FALSE) with_options(list(download.file.method = NULL), expect_equal(download_method(), "auto")) }) test_that("download", { skip_on_cran() skip_if_offline() tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) download(tmp, "http://httpbin.org/get", auth_token = NULL) res <- json$parse_file(tmp) expect_null(res$headers$Authorization) download(tmp, "http://httpbin.org/get", auth_token = "foobar") res <- json$parse_file(tmp) expect_equal(res$headers$Authorization, "token foobar") }) test_that("os_type", { expect_equal(os_type(), .Platform$OS.type) }) test_that("download fallback to curl, https", { skip_on_cran() skip_if_offline() skip_if(is_standalone()) mockery::stub(download, "get_r_version", "3.0.0") download(tmp <- tempfile(), "https://httpbin.org/ip") expect_match(paste(readLines(tmp), collapse = "\n"), "origin") }) test_that("download with curl, basic auth", { skip_on_cran() skip_if_offline() skip_if(is_standalone()) mockery::stub(download, "get_r_version", "3.0.0") download( tmp <- tempfile(), "http://httpbin.org/basic-auth/user/passwd", basic_auth = list(user = "user", password = "passwd") ) expect_match( paste(readLines(tmp), collapse = "\n"), '"authenticated": true' ) }) test_that("base download with custom headers", { skip_on_cran() skip_if_offline() url <- "http://httpbin.org/anything" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) head <- c("X-Custom" = "Foobar") base_download(url, path = tmp, quiet = TRUE, headers = head) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) 
expect_equal(resp$headers$`X-Custom`, "Foobar") }) test_that("wget method download with custom headers", { skip_on_cran() skip_if_offline() skip_without_program("wget") url <- "http://httpbin.org/anything" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) head <- c("X-Custom" = "Foobar") extra <- "--header=\"X-Another: extra-header\"" with_options( list(download.file.method = "wget", download.file.extra = extra), base_download(url, path = tmp, quiet = TRUE, headers = head)) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_equal(resp$headers$`X-Custom`, "Foobar") expect_equal(resp$headers$`X-Another`, "extra-header") }) test_that("curl method download with custom headers", { skip_on_cran() skip_if_offline() skip_without_program("curl") url <- "http://httpbin.org/anything" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) head <- c("X-Custom" = "Foobar") extra <- "-H \"X-Another: extra-header\"" with_options( list(download.file.method = "curl", download.file.extra = extra), base_download(url, path = tmp, quiet = TRUE, headers = head)) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_equal(resp$headers$`X-Custom`, "Foobar") expect_equal(resp$headers$`X-Another`, "extra-header") }) test_that("internal method download with custom headers", { skip_on_cran() skip_if_offline() url <- "http://httpbin.org/anything" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) head <- c("X-Custom" = "Foobar") with_options( list(download.file.method = "internal"), base_download(url, path = tmp, quiet = TRUE, headers = head)) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_equal(resp$headers$`X-Custom`, "Foobar") }) test_that("wininet method download with custom headers", { skip_on_cran() skip_if_offline() if (os_type() == "unix") return(expect_true(TRUE)) if (getRversion() < "3.6.0") return(expect_true(TRUE)) url <- "http://httpbin.org/anything" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) head <- c("X-Custom" 
= "Foobar") with_options( list(download.file.method = "wininet"), base_download(url, path = tmp, quiet = TRUE, headers = head)) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_equal(resp$headers$`X-Custom`, "Foobar") }) test_that("curl download with custom headers", { skip_on_cran() skip_if_offline() url <- "https://httpbin.org/anything" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) head <- c("X-Custom" = "Foobar") curl_download(url, path = tmp, quiet = TRUE, headers = head) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_equal(resp$headers$`X-Custom`, "Foobar") }) test_that("base download with basic auth", { skip_on_cran() skip_if_offline() url <- "http://httpbin.org/basic-auth/ruser/rpass" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) download(url, path = tmp, quiet = TRUE, basic_auth = list(user = "ruser", password = "rpass")) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_true(resp$authenticated) expect_equal(resp$user, "ruser") }) test_that("base wget download with basic auth", { skip_on_cran() skip_if_offline() skip_without_program("wget") url <- "http://httpbin.org/basic-auth/ruser/rpass" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) with_options( list(download.file.method = "wget"), download(url, path = tmp, quiet = TRUE, basic_auth = list(user = "ruser", password = "rpass"))) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_true(resp$authenticated) expect_equal(resp$user, "ruser") }) test_that("base curl download with basic auth", { skip_on_cran() skip_if_offline() skip_without_program("curl") url <- "http://httpbin.org/basic-auth/ruser/rpass" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) with_options( list(download.file.method = "curl"), download(url, path = tmp, quiet = TRUE, basic_auth = list(user = "ruser", password = "rpass"))) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_true(resp$authenticated) 
expect_equal(resp$user, "ruser") }) test_that("base internal download with basic auth", { skip_on_cran() skip_if_offline() url <- "http://httpbin.org/basic-auth/ruser/rpass" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) with_options( list(download.file.method = "internal"), download(url, path = tmp, quiet = TRUE, basic_auth = list(user = "ruser", password = "rpass"))) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_true(resp$authenticated) expect_equal(resp$user, "ruser") }) test_that("base wininet download with basic auth", { skip_on_cran() skip_if_offline() if (os_type() == "unix") return(expect_true(TRUE)) if (getRversion() < "3.6.0") return(expect_true(TRUE)) url <- "http://httpbin.org/basic-auth/ruser/rpass" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) with_options( list(download.file.method = "wininet"), download(url, path = tmp, quiet = TRUE, basic_auth = list(user = "ruser", password = "rpass"))) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_true(resp$authenticated) expect_equal(resp$user, "ruser") }) test_that("curl download with basic auth", { skip_on_cran() skip_if_offline() mockery::stub(download, "get_r_version", "3.0.0") url <- "https://httpbin.org/basic-auth/ruser/rpass" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) download(url, path = tmp, quiet = TRUE, basic_auth = list(user = "ruser", password = "rpass")) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_true(resp$authenticated) expect_equal(resp$user, "ruser") }) test_that("base curl download redirects", { skip_on_cran() skip_if_offline() skip_without_program("curl") url <- "http://httpbin.org/absolute-redirect/1" tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) with_options( list(download.file.method = "curl"), download(url, path = tmp, quiet = TRUE) ) expect_true(file.exists(tmp)) resp <- json$parse(readLines(tmp)) expect_equal(resp$url, "http://httpbin.org/get") }) 
remotes/tests/testthat/test-install-version.R0000644000176200001440000000713513502705061021170 0ustar liggesusers context("Install a specific version from CRAN") test_that("install_version", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" install_version("pkgconfig", "1.0.0", lib = lib, repos = repos, quiet = TRUE) expect_silent(packageDescription("pkgconfig", lib.loc = lib)) desc <- packageDescription("pkgconfig", lib.loc = lib) expect_equal(desc$Version, "1.0.0") expect_null(desc$RemoteType) expect_null(desc$RemoteSubdir) expect_null(desc$RemoteUrl) }) test_that("package_find_repo() works correctly with multiple repos", { skip_on_cran() skip_if_offline() repos <- c(CRANextras = "http://www.stats.ox.ac.uk/pub/RWin", CRAN = "http://cran.rstudio.com") # ROI.plugin.glpk is the smallest package in the CRAN archive package <- "ROI.plugin.glpk" res <- package_find_repo(package, repos = repos) expect_true(NROW(res) >= 1) expect_equal(res$repo[1], "http://cran.rstudio.com") expect_match(rownames(res), package) }) test_that("install_version for current version", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" install_version("pkgconfig", NULL, lib = lib, repos = repos, type = "source", quiet = TRUE) expect_silent(packageDescription("pkgconfig", lib.loc = lib)) }) test_that("intall_version and invalid version number", { skip_on_cran() skip_if_offline() repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" expect_error( install_version("pkgconfig", "109.42", repos = 
repos), "version '109.42' is invalid for package 'pkgconfig'" ) expect_error( download_version("pkgconfig", "109.42", repos = repos), "version '109.42' is invalid for package 'pkgconfig'" ) }) test_that("install_version and non-existing package", { skip_on_cran() skip_if_offline() repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" expect_error( install_version("42xxx", "1.0.0", repos = repos), "couldn't find package '42xxx'" ) }) test_that("install_version for archived packages", { skip_on_cran() skip_if_offline() repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" lib <- tempfile() mockery::stub(install_version, "install_url", function(url, ...) url) mockery::stub(install_version, "add_metadata", NULL) expect_match(fixed = TRUE, install_version("igraph0", type = "source", lib = lib, repos = repos), "src/contrib/Archive/igraph0/igraph0_0.5.7.tar.gz" ) mockery::stub(download_version, "download", function(url, ...) url) expect_match(fixed = TRUE, download_version("igraph0", type = "source", lib = lib, repos = repos), "src/contrib/Archive/igraph0/igraph0_0.5.7.tar.gz" ) }) test_that("install_version for other types fails", { expect_error( install_version("igraph0", type = "binary"), "must be 'source'" ) expect_error( install_version("igraph0", type = "win.binary"), "must be 'source'" ) expect_error( install_version("igraph0", type = "mac.binary"), "must be 'source'" ) }) remotes/tests/testthat/Biobase/0000755000176200001440000000000013476462075016314 5ustar liggesusersremotes/tests/testthat/Biobase/DESCRIPTION0000644000176200001440000000201113476462075020014 0ustar liggesusersPackage: Biobase Title: Biobase: Base functions for Bioconductor Version: 2.43.1 Author: R. Gentleman, V. Carey, M. Morgan, S. Falcon Description: Functions that are needed by many other packages or which replace R functions. 
Suggests: tools, tkWidgets, ALL, RUnit, golubEsets Depends: R (>= 2.10), BiocGenerics (>= 0.27.1), utils Imports: methods Maintainer: Bioconductor Package Maintainer License: Artistic-2.0 Collate: tools.R strings.R environment.R vignettes.R packages.R AllGenerics.R VersionsClass.R VersionedClasses.R methods-VersionsNull.R methods-VersionedClass.R DataClasses.R methods-aggregator.R methods-container.R methods-MIAxE.R methods-MIAME.R methods-AssayData.R methods-AnnotatedDataFrame.R methods-eSet.R methods-ExpressionSet.R methods-MultiSet.R methods-SnpSet.R methods-NChannelSet.R anyMissing.R rowOp-methods.R updateObjectTo.R methods-ScalarObject.R zzz.R LazyLoad: yes biocViews: Infrastructure remotes/tests/testthat/archives/0000755000176200001440000000000013243326354016543 5ustar liggesusersremotes/tests/testthat/archives/foo.tar0000644000176200001440000001000013243326354020025 0ustar liggesusersfoo/000755 000765 000024 00000000000 12643157623 013275 5ustar00gaborcsardistaff000000 000000 foo/DESCRIPTION000644 000765 000024 00000000264 12643157612 015003 0ustar00gaborcsardistaff000000 000000 Package: foo Title: Foo Bar Description: Seriously. Foo Bar. Version: 1.0.0 Author: Gabor Csardi Maintainer: Gabor Csardi License: MIT + file LICENSE foo/R/000755 000765 000024 00000000000 12643157512 013473 5ustar00gaborcsardistaff000000 000000 foo/R/foo.R000644 000765 000024 00000000015 12643157512 014375 0ustar00gaborcsardistaff000000 000000 foo <- 1 + 1 remotes/tests/testthat/archives/foo.tar.bz20000644000176200001440000000060313243326354020531 0ustar liggesusersBZh91AY&SY.S֒ @  0ZBQ)OPڍi<9&L&L&M4 Dʞ&OP3SCj=O(ф=MKr|SÖHDzj" duxmPPFRSW>Hcenmσvr 4tvv=BYA+ivqʦÉaU({ P$d.ƞ ;/8Nj`W>\&tFRD&w^056BmA"n۫涽a LSO(\z^;,f9ڣ`NB$^nPqcPdE. eP+%///b]B@~:4remotes/tests/testthat/archives/foo.tar.gz0000644000176200001440000000060113243326354020452 0ustar liggesusersǙV]k0`+&HAdlS7 DeYvaI{udu8BIkJ z#DaڑxA;X4B}]oVk+=~<?QE*J}!e~HGyL*^nE'kz <}? 
#F:B0Xgbldkkg"Ls)U1RU&q]NhLRĹGpSM-9Ei_`FY[-/27*ꕌ3(<20zu:o؜?|?6:[oA&>Pc)6VB!B!B!N+&W(remotes/tests/testthat/archives/foo.tbz0000644000176200001440000000060313243326354020046 0ustar liggesusersBZh91AY&SY.S֒ @  0ZBQ)OPڍi<9&L&L&M4 Dʞ&OP3SCj=O(ф=MKr|SÖHDzj" duxmPPFRSW>Hcenmσvr 4tvv=BYA+ivqʦÉaU({ P$d.ƞ ;/8Nj`W>\&tFRD&w^056BmA"n۫涽a LSO(\z^;,f9ڣ`NB$^nPqcPdE. eP+%///b]B@~:4remotes/tests/testthat/archives/foo.tgz0000644000176200001440000000055513243326354020061 0ustar liggesusers2V[K0]W|a Q)mmxk[]#I{7m=U[)MLAcQP;\ㆾQzύ}oQ+HhnŻי˲ License: MIT + file LICENSE remotes/tests/testthat/archives/foo/R/0000755000176200001440000000000013243326354017527 5ustar liggesusersremotes/tests/testthat/archives/foo/R/foo.R0000644000176200001440000000001513243326354020431 0ustar liggesusersfoo <- 1 + 1 remotes/tests/testthat/archives/foo/configure0000755000176200001440000000000013621331471021217 0ustar liggesusersremotes/tests/testthat/archives/foo.zip0000644000176200001440000000137213243326354020055 0ustar liggesusersPK FL&Hfoo/UT ߌVVux PKAL&H%foo/DESCRIPTIONUT ߌVߌVux ]1 0FP"*Vx.ւ𾋱O]=0 D7N,+!ZHye+L dnP;`Lܟ3l=--4lTE4U 18<PK L&Hfoo/R/UT JߌVVux PK L&Hu foo/R/foo.RUT JߌVJߌVux foo <- 1 + 1 PK FL&HAfoo/UTߌVux PKAL&H%>foo/DESCRIPTIONUTߌVux PK L&HAfoo/R/UTJߌVux PK L&Hu Vfoo/R/foo.RUTJߌVux PK<remotes/tests/testthat/test-install-gitlab.R0000644000176200001440000000616313621330161020742 0ustar liggesuserscontext("Install from GitLab") test_that("install_gitlab", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_gitlab("jimhester/falsy", lib = lib, quiet = TRUE) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal( packageDescription("falsy", lib.loc = lib)$RemoteRepo, "falsy") remote <- package2remote("falsy", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "gitlab_remote") expect_equal(format(remote), "GitLab") expect_equal(remote$host, "gitlab.com") 
expect_equal(remote$username, "jimhester") expect_equal(remote$repo, "falsy") expect_equal(remote$ref, "master") expect_equal(remote$subdir, NULL) expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("install_gitlab with subgroups and special characters", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_gitlab("r-lib-grp/my-awesome-group/test.pkg", lib = lib, quiet = TRUE) expect_silent(packageDescription("test123", lib.loc = lib)) expect_equal( packageDescription("test123", lib.loc = lib)$RemoteRepo, "my-awesome-group/test.pkg") remote <- package2remote("test123", lib = lib) expect_equal(remote$username, "r-lib-grp") expect_equal(remote$repo, "my-awesome-group/test.pkg") expect_equal(remote$subdir, NULL) install_gitlab("r-lib-grp/test-pkg", lib = lib, quiet = TRUE) expect_silent(packageDescription("test123", lib.loc = lib)) expect_equal( packageDescription("test123", lib.loc = lib)$RemoteRepo, "test-pkg") remote <- package2remote("test123", lib = lib) expect_equal(remote$username, "r-lib-grp") expect_equal(remote$repo, "test-pkg") expect_equal(remote$subdir, NULL) }) test_that("error if not username, warning if given as argument", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) expect_error( install_gitlab("falsy", lib = lib, quiet = TRUE) ) }) test_that("remote_download.gitlab_remote messages", { skip_on_cran() skip_if_offline() mockery::stub(remote_download.gitlab_remote, "download", TRUE) expect_message( remote_download.gitlab_remote( remote("gitlab", host = "https://gitlab.com", username = "jimhester", repo = "falsy", ref = "master" ) ), "Downloading GitLab repo" ) }) test_that("remote_sha.gitlab_remote", { skip_on_cran() skip_if_offline() expect_equal( remote_sha( remote("gitlab", host = "https://gitlab.com", username = "jimhester", repo = "falsy", 
ref = "1.0" ) ), "0f39d9eb735bf16909831c0bb129063dda388375" ) }) test_that("gitlab_project_id", { skip_on_cran() skip_if_offline() expect_equal( gitlab_project_id( username = "jimhester", repo = "covr", host = "https://gitlab.com", ref = "master" ), 1486846 ) }) remotes/tests/testthat/test-install-local.R0000644000176200001440000000336713503160034020574 0ustar liggesusers context("Installing from local files") test_that("install_local", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) dir <- tempfile() on.exit(unlink(dir), add = TRUE) dir.create(dir) repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" pkg <- download.packages("pkgconfig", dir, repos = repos, type = "source", quiet = TRUE) install_local(pkg[, 2], lib = lib, quiet = TRUE) expect_silent(packageDescription("pkgconfig", lib.loc = lib)) expect_equal( packageDescription("pkgconfig", lib.loc = lib)$RemoteType, "local") remote <- package2remote("pkgconfig", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "local_remote") expect_equal(format(remote), "local") expect_equal(remote$path, normalizePath(pkg[, 2])) expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("local remotes do not fail if the local install no longer exists", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) dir <- tempfile() on.exit(unlink(dir, recursive = TRUE), add = TRUE) dir.create(dir) repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" pkg <- download.packages("pkgconfig", dir, repos = repos, type = "source", quiet = TRUE) install_local(pkg[, 2], lib = lib, quiet = TRUE) unlink(dir, recursive = TRUE) withr::with_libpaths(lib, expect_error(update_packages(pkg[, 1], lib 
= lib, upgrade = TRUE, quiet = TRUE), NA) ) }) remotes/tests/testthat/test-deps.R0000644000176200001440000003554613502702607017004 0ustar liggesusers context("Deps") test_that("standardise_dep", { expect_equal( standardise_dep(NA), c("Depends", "Imports", "LinkingTo") ) expect_equal( standardise_dep(TRUE), c("Depends", "Imports", "LinkingTo", "Suggests") ) expect_equal( standardise_dep(FALSE), character(0) ) expect_equal( standardise_dep(c("Imports", "Suggests")), c("Imports", "Suggests") ) expect_error( standardise_dep(1:10), "Dependencies must be a boolean or a character vector" ) }) test_that("compare_versions", { expect_equal( compare_versions( c("1.0.0", "1.0.0", "1.0.0"), c("1.0.1", "0.9.0", "1.0.0"), c(TRUE, TRUE, TRUE)), c(-1L, 1L, 0L) ) expect_equal( compare_versions( c(NA, "1.0.0"), c("1.0.0", NA), c(TRUE, TRUE)), c(-2L, 2L) ) }) test_that("remote_deps", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() pkg <- list( package = "foo", remotes = "github::hadley/testthat@v2.0.0,klutometis/roxygen@v6.0.1" ) res <- remote_deps(pkg) expect_equal(res$package, c("testthat", "roxygen2")) # There are the shas for the v2.0.0 and v6.0.1 tags respectivly expect_equal( res$available, c("b0c0d5dcd78c5f97790c4b6ddb5babbce4c63a9d", "c4081879d943ad31eacbbf47410dca0ff88d6460")) }) test_that("printing package_deps", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "magrittr"), installed = c("1.0", "1.0", "1.0"), available = c("1.0", NA, "1.0"), diff = c(0L, 2L, 0L) ) object$remote <- list(cran_remote("dotenv", NULL, NULL), cran_remote("falsy", NULL, NULL), cran_remote("magrittr", NULL, NULL)) class(object) <- c("package_deps", "data.frame") expect_output( print(object), "Not on CRAN.*\n.*package.*\n.*falsy.*" ) expect_output( print(object, show_ok = TRUE), paste( sep = "\n", "Not on CRAN.*", ".*package.*", ".*falsy.*", "OK.*", ".*package.*", " dotenv.*", " magrittr" ) ) object <- data.frame( stringsAsFactors = FALSE, package = 
c("dotenv", "falsy", "magrittr"), installed = c("1.0", "1.0", NA), available = c("1.0", "1.1", "1.0"), diff = c(0L, -1L, -2L) ) object$remote <- list(cran_remote("dotenv", NULL, NULL), cran_remote("falsy", NULL, NULL), cran_remote("magrittr", NULL, NULL)) class(object) <- c("package_deps", "data.frame") expect_output( print(object), paste( sep = "\n", "Needs update ------.*", " package .*", " falsy .*", " magrittr .*" ) ) }) test_that("update_packages", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "magrittr"), installed = c("1.0", "1.0", NA), available = c("1.0", "1.1", "1.0"), diff = c(0L, -1L, -2L) ) class(object) <- c("package_deps", "data.frame") mockery::stub(update_packages, "package_deps", object) mockery::stub( update_packages, "update.package_deps", function(x, ...) x$package) expect_equal( update_packages("dotenv"), c("dotenv", "falsy", "magrittr") ) }) test_that("Additional_repositories field", { pkg <- list( additional_repositories = "http://packages.ropensci.org, http://foo.bar.com" ) expect_equal( parse_additional_repositories(pkg), c("http://packages.ropensci.org", "http://foo.bar.com") ) pkg <- list( additional_repositories = "http://packages.ropensci.org, \nhttp://foo.bar.com" ) expect_equal( parse_additional_repositories(pkg), c("http://packages.ropensci.org", "http://foo.bar.com") ) pkg <- list( additional_repositories = "\n http://packages.ropensci.org, \nhttp://foo.bar.com" ) expect_equal( parse_additional_repositories(pkg), c("http://packages.ropensci.org", "http://foo.bar.com") ) }) test_that("update.package_deps", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "magrittr"), installed = c("1.0", "1.0", "1.0"), available = c("1.0", NA, "1.0"), diff = c(CURRENT, UNAVAILABLE, CURRENT), is_cran = c(TRUE, TRUE, TRUE) ) object$remote <- list( cran_remote("dotenv", getOption("repos"), getOption("type")), cran_remote("falsy", getOption("repos"), getOption("type")), 
cran_remote("magrittr", getOption("repos"), getOption("type")) ) class(object) <- c("package_deps", "data.frame") mockery::stub(update, "install_packages", NULL) expect_message( update(object, upgrade = TRUE, quiet = FALSE), "Skipping 1 packages? not available: falsy" ) }) test_that("update.package_deps 2", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "magrittr"), installed = c("1.0", "1.1", "1.0"), available = c("1.0", "1.0", "1.0"), diff = c(CURRENT, AHEAD, CURRENT), is_cran = c(TRUE, TRUE, TRUE) ) object$remote <- list( cran_remote("dotenv", getOption("repos"), getOption("type")), cran_remote("falsy", getOption("repos"), getOption("type")), cran_remote("magrittr", getOption("repos"), getOption("type")) ) class(object) <- c("package_deps", "data.frame") mockery::stub(update, "install_packages", NULL) expect_message( update(object, upgrade = TRUE, quiet = FALSE), "Skipping 1 packages? ahead of CRAN: falsy" ) }) test_that("update.package_deps 3", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "magrittr"), installed = c("1.0", "1.0", NA), available = c("1.0", "1.1", "1.0"), diff = c(CURRENT, BEHIND, UNINSTALLED), is_cran = c(TRUE, TRUE, TRUE) ) object$remote <- list( cran_remote("dotenv", getOption("repos"), getOption("type")), cran_remote("falsy", getOption("repos"), getOption("type")), cran_remote("magrittr", getOption("repos"), getOption("type")) ) class(object) <- c("package_deps", "data.frame") mockery::stub( update.package_deps, "install_packages", function(packages, ...) 
packages) expect_equal( update(object, upgrade = FALSE), NULL ) }) context("Remotes") test_that("remote_deps returns an empty data frame if no remotes specified", { pkg <- list( package = "foo" ) expect_equal(remote_deps(pkg), package_deps_new()) }) test_that("remote_deps works with implicit types", { expect_equal( parse_one_remote("hadley/testthat"), github_remote("hadley/testthat") ) expect_equal(split_remotes("hadley/testthat,klutometis/roxygen"), c("hadley/testthat", "klutometis/roxygen")) expect_equal(split_remotes("hadley/testthat,\n klutometis/roxygen"), c("hadley/testthat", "klutometis/roxygen")) expect_equal(split_remotes("hadley/testthat,\n\t klutometis/roxygen"), c("hadley/testthat", "klutometis/roxygen")) }) test_that("split_remotes errors with missing commas", { expect_error(split_remotes("hadley/testthat hadley/ggplot2"), "Missing commas") expect_error(split_remotes("hadley/testthat\n hadley/ggplot2"), "Missing commas") expect_error(split_remotes("hadley/testthat, hadley/ggplot2, klutometis/roxygen r-lib/devtools"), "Missing commas.*'klutometis") }) test_that("parse_one_remote errors", { expect_error(parse_one_remote(""), "Malformed remote specification ''") expect_error(parse_one_remote("git::testthat::blah"), "Malformed remote specification 'git::testthat::blah'") expect_error(parse_one_remote("hadley::testthat"), "Unknown remote type: hadley") expect_error(parse_one_remote("SVN2::testthat"), "Unknown remote type: SVN2") expect_error( parse_one_remote("git::testthat::blah"), "Malformed remote specification 'git::testthat::blah'" ) expect_error( parse_one_remote("hadley::testthat"), "Unknown remote type: hadley" ) expect_error( parse_one_remote("SVN2::testthat"), "Unknown remote type: SVN2" ) }) test_that("remotes are parsed with explicit types", { expect_equal( parse_one_remote("github::hadley/testthat"), github_remote("hadley/testthat")) expect_equal(split_remotes("github::hadley/testthat,klutometis/roxygen"), c("github::hadley/testthat", 
"klutometis/roxygen")) expect_equal(split_remotes("hadley/testthat,github::klutometis/roxygen"), c("hadley/testthat", "github::klutometis/roxygen")) expect_equal(split_remotes("github::hadley/testthat,github::klutometis/roxygen"), c("github::hadley/testthat", "github::klutometis/roxygen")) expect_equal(split_remotes("bioc::user:password@release/Biobase#12345,github::klutometis/roxygen"), c("bioc::user:password@release/Biobase#12345", "github::klutometis/roxygen")) }) test_that("type = 'both' works well", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() # -6 is the remote column, which includes the pkg_type # as an attribute, so we remove it from the comparison expect_equal( package_deps("falsy", type = "both")[-6], package_deps("falsy", type = "binary")[-6] ) }) test_that("resolve_upgrade works", { # returns ask by default when used interactively expect_equal(resolve_upgrade(c("default", "ask", "always", "never"), is_interactive = TRUE), "ask") # returns always by default when used non-interactively expect_equal(resolve_upgrade(c("default", "ask", "always", "never"), is_interactive = FALSE), "always") # returns always when given TRUE or always input expect_equal(resolve_upgrade(TRUE, is_interactive = FALSE), "always") expect_equal(resolve_upgrade("always", is_interactive = FALSE), "always") # returns never when given FALSE or never input expect_equal(resolve_upgrade(FALSE, is_interactive = FALSE), "never") expect_equal(resolve_upgrade("never", is_interactive = FALSE), "never") # errors on unexpected inputs expect_error(resolve_upgrade("sometimes"), "'arg' should be one of") }) test_that("upgradeable_packages works", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "rlang", "magrittr"), installed = c("1.0", "1.0", "abc123", NA), available = c("1.0", "1.1", "zyx456", "1.0"), diff = c(CURRENT, BEHIND, BEHIND, UNINSTALLED), is_cran = c(TRUE, TRUE, FALSE, TRUE) ) object$remote <- list( cran_remote("dotenv", getOption("repos"), 
getOption("type")), cran_remote("falsy", getOption("repos"), getOption("type")), github_remote("rlib/rlang"), cran_remote("magrittr", getOption("repos"), getOption("type")) ) class(object) <- c("package_deps", "data.frame") # returns full object if "always" expect_equal(upgradable_packages(object, "always", TRUE), object) # returns only uninstalled packages if "never" expect_equal(upgradable_packages(object, "never", TRUE), object[which(object$package == "magrittr"), ]) # returns full object if "ask" and not is_interactive expect_equal(upgradable_packages(object, "ask", is_interactive = FALSE, TRUE), object) # returns selected row to update if "ask" and is_interactive mockery::stub(upgradable_packages, "select_menu", function(...) "falsy (1.0 -> 1.1 ) [CRAN]", TRUE) expect_equal(upgradable_packages(object, "ask", TRUE, is_interactive = TRUE), object[c( which(object$package == "falsy"), which(object$package == "magrittr") ), ] ) # returns selected rows to update if "ask" and is_interactive mockery::stub(upgradable_packages, "select_menu", function(...) c("falsy (1.0 -> 1.1 ) [CRAN]", "rlang (abc123 -> zyx456) [GitHub]")) expect_equal(upgradable_packages(object, "ask", TRUE, is_interactive = TRUE), object[c( which(object$package == "falsy"), which(object$package == "rlang"), which(object$package == "magrittr") ), ] ) # All should be the whole object mockery::stub(upgradable_packages, "select_menu", function(...) "All") expect_equal(upgradable_packages(object, "ask", TRUE, is_interactive = TRUE), object) # None should be only un-installed packages mockery::stub(upgradable_packages, "select_menu", function(...) "None") expect_equal(upgradable_packages(object, "ask", TRUE, is_interactive = TRUE), object[which(object$package == "magrittr"), ]) # CRAN should be only the CRAN packages mockery::stub(upgradable_packages, "select_menu", function(...) 
"CRAN packages only") expect_equal(upgradable_packages(object, "ask", TRUE, is_interactive = TRUE), object[c( which(object$package == "falsy"), which(object$package == "magrittr") ), ] ) # empty vector should be the 0 row object (you get this when canceling the selection) mockery::stub(upgradable_packages, "select_menu", function(...) character(0)) expect_equal(upgradable_packages(object, "ask", TRUE, is_interactive = TRUE), object[which(object$package == "magrittr"), ]) # If only given current or ahead packages (which dotenv is), just return that expect_equal(upgradable_packages(object[object$package == "dotenv", ], "ask", TRUE, is_interactive = TRUE), object[object$package == "dotenv", ]) }) test_that("format_upgrades works", { object <- data.frame( stringsAsFactors = FALSE, package = c("dotenv", "falsy", "rlang", "magrittr"), installed = c("1.0", "1.0", "abc123", NA), available = c("1.0", "1.1", "zyx456", "1.0"), diff = c(CURRENT, BEHIND, BEHIND, UNINSTALLED), is_cran = c(TRUE, TRUE, FALSE, TRUE) ) object$remote <- list( cran_remote("dotenv", getOption("repos"), getOption("type")), cran_remote("falsy", getOption("repos"), getOption("type")), github_remote("rlib/rlang"), cran_remote("magrittr", getOption("repos"), getOption("type")) ) class(object) <- c("package_deps", "data.frame") expect_equal( format_upgrades(object[0, ]), character(0) ) expect_equal( format_upgrades(object[object$diff < BEHIND, ]), "magrittr (NA -> 1.0) [CRAN]" ) expect_equal( format_upgrades(object[object$diff <= BEHIND, ]), c( "falsy (1.0 -> 1.1 ) [CRAN]", "rlang (abc123 -> zyx456) [GitHub]", "magrittr (NA -> 1.0 ) [CRAN]" ) ) expect_equal( format_upgrades(object), c( "dotenv (1.0 -> 1.0 ) [CRAN]", "falsy (1.0 -> 1.1 ) [CRAN]", "rlang (abc123 -> zyx456) [GitHub]", "magrittr (NA -> 1.0 ) [CRAN]" ) ) }) test_that("dev_package_deps works with package using remotes", { skip_on_cran() skip_if_offline() res <- dev_package_deps(test_path("withremotes"), dependencies = TRUE) is_falsy <- "falsy" == 
res$package expect_true(any(is_falsy)) expect_is(res$remote[is_falsy][[1]], "github_remote") is_testthat <- "testthat" == res$package expect_true(any(is_testthat)) expect_is(res$remote[is_testthat][[1]], "cran_remote") }) remotes/tests/testthat/test-github.R0000644000176200001440000000477313502763411017331 0ustar liggesusers context("GitHub") test_that("github_pat", { withr::local_envvar(c(GITHUB_PAT="badcafe")) expect_equal(github_pat(), "badcafe") expect_message(github_pat(quiet = FALSE), "Using github PAT from envvar GITHUB_PAT") withr::with_envvar(c(GITHUB_PAT=NA, CI=NA), { expect_equal(github_pat(), NULL) }) withr::with_envvar(c(GITHUB_PAT=NA, CI="true"), { expect_true(nzchar(github_pat())) }) expect_true(nzchar(github_pat())) }) test_that("github_commit", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() # Without curl expect_equal( github_commit("r-lib", "remotes", "1.0.0", use_curl = FALSE), "92e5d5c061f181242cb877e4714bea76d94927da") # With curl expect_equal( github_commit("r-lib", "remotes", "1.0.0", use_curl = TRUE), "92e5d5c061f181242cb877e4714bea76d94927da") # With curl and different local sha expect_equal( github_commit("r-lib", "remotes", "1.0.0", use_curl = TRUE, current_sha = "xyz"), "92e5d5c061f181242cb877e4714bea76d94927da") # With curl and same local sha expect_equal( github_commit("r-lib", "remotes", "1.0.0", use_curl = TRUE, current_sha = "92e5d5c061f181242cb877e4714bea76d94927da"), "92e5d5c061f181242cb877e4714bea76d94927da") }) test_that("github_DESCRIPTION", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() tmp <- tempfile() on.exit(unlink(tmp)) download(tmp, "https://raw.githubusercontent.com/r-lib/remotes/1.0.0/DESCRIPTION") desc <- readChar(tmp, file.info(tmp)$size) # Without curl expect_equal( github_DESCRIPTION("r-lib", "remotes", ref = "1.0.0", use_curl = FALSE), desc) # With curl expect_equal( github_DESCRIPTION("r-lib", "remotes", ref = "1.0.0", use_curl = TRUE), desc) }) test_that("github_error", { 
mockery::stub( github_error, "curl::parse_headers_list", list(`x-ratelimit-remaining` = 0, `x-ratelimit-limit` = 5000, `x-ratelimit-reset` = "1539962178")) # Test without the TRAVIS envvar set withr::with_envvar(c(TRAVIS = NA), { err <- github_error(list(headers = "", status_code = "304", content = charToRaw('{"message": "foobar"}'))) expect_known_output(conditionMessage(err), test_path("github-error-local.txt"), print = TRUE) }) # Test with the TRAVIS envvar set withr::with_envvar(c(TRAVIS = "true"), { err <- github_error(list(headers = "", status_code = "304", content = charToRaw('{"message": "foobar"}'))) expect_known_output(conditionMessage(err), test_path("github-error-travis.txt"), print = TRUE) }) }) remotes/tests/testthat/test-install-deps.R0000644000176200001440000000075313352235013020433 0ustar liggesusers context("Installing package dependencies") test_that("installing packages with dependencies", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_github("cran/desc", lib = lib, quiet = TRUE) expect_silent(packageDescription("desc", lib.loc = lib)) expect_equal( packageDescription("desc", lib.loc = lib)$RemoteRepo, "desc" ) }) remotes/tests/testthat/test-submodule.R0000644000176200001440000000737413621330340020037 0ustar liggesuserscontext("submodule.R") test_that("parse_submodules works with a single submodule", { x <- '[submodule "foobar"] path = baz url = http://foo/bar' expect_equal( parse_submodules(x), data.frame( submodule = "foobar", path = "baz", url = "http://foo/bar", branch = NA_character_, stringsAsFactors = FALSE)) }) test_that("parse_submodules works multiple submodules", { y <- '[submodule "foobar"] path = baz url = http://foo/bar [submodule "foofoo"] path = bunny url = http://little/bunny/foofoo branch = forest' expect_equal( parse_submodules(y), data.frame( submodule = c("foobar", "foofoo"), path = c("baz", 
"bunny"), url = c("http://foo/bar", "http://little/bunny/foofoo"), branch = c(NA_character_, "forest"), stringsAsFactors = FALSE)) }) test_that("parse_submodules warns and returns empty for invalid submodules", { x <- '[submodule "foobar"] path = baz' expect_warning(regexp = "Invalid submodule definition", expect_equal( parse_submodules(x), list() ) ) y <- '[submodule "foobar"] path = baz [submodule "foofoo"] path = bunny url = http://little/bunny/foofoo' expect_warning(regexp = "Invalid submodule definition", expect_equal( parse_submodules(y), list() ) ) z <- ' # [submodule "foobar"] this one is commented out # path = baz # url = https://foo/bar' expect_equal( parse_submodules(z), list() ) }) test_that("Can install a repo with a submodule", { if (is.null(git_path())) skip("git is not installed") dir <- tempfile() dir.create(dir) on.exit(unlink(dir, recursive = TRUE, force = TRUE)) writeLines("foo <- 1", file.path(dir, "foo.R")) in_dir(dir, { git("init") git(paste("add", "-A", ".")) git(paste( # We need to temporarily set the user name and user email, # in case they are not set "-c", "user.name=foobar", "-c", paste0("user.email=", shQuote("<>")), "commit", "-m", shQuote("Initial commit"))) }) module <- file.path("submodule", ".gitmodules") on.exit(unlink(module), add = TRUE) writeLines(con = module, sprintf( '[submodule "foo"] path = R url = file://%s branch = master [submodule "bar"] path = bar url = file://%s branch = master', URLencode(dir), URLencode(dir) ) ) # The bar submodule is in .Rbuildignore, so we will not fetch it build_ignore <- file.path("submodule", ".Rbuildignore") on.exit(unlink(build_ignore), add = TRUE) writeLines("^bar$", build_ignore) update_submodules("submodule", NULL, quiet = TRUE) expect_true(dir.exists(file.path("submodule", "R"))) expect_false(dir.exists(file.path("submodule", "bar"))) # Now remove the R directory so we can try installing the full package unlink(file.path("submodule", "R"), recursive = TRUE, force = TRUE) # Install the 
package to a temporary library and verify it works lib <- tempfile() on.exit(unlink(lib, recursive = TRUE, force = TRUE), add = TRUE) dir.create(lib) install_local("submodule", lib = lib, quiet = TRUE) withr::with_libpaths(lib, expect_equal(submodule::foo, 1) ) }) test_that("Can update a submodule with an empty .gitmodules submodule", { if (is.null(git_path())) skip("git is not installed") dir <- tempfile() dir.create(dir) on.exit(unlink(dir, recursive = TRUE, force = TRUE)) module <- file.path("submodule", ".gitmodules") on.exit(unlink(module), add = TRUE) writeLines(con = module,text = "") # The bar submodule is in .Rbuildignore, so we will not fetch it build_ignore <- file.path("submodule", ".Rbuildignore") on.exit(unlink(build_ignore), add = TRUE) writeLines("^bar$", build_ignore) expect_error( update_submodules("submodule", NULL, quiet = TRUE), NA ) }) remotes/tests/testthat/test-install-cran.R0000644000176200001440000000075313352235013020423 0ustar liggesuserscontext("Installing from CRAN") test_that("", { skip_on_cran() skip_if_offline() repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_cran("pkgconfig", lib = lib, repos = repos, force = TRUE, quiet = TRUE) expect_silent(packageDescription("pkgconfig", lib.loc = lib)) }) remotes/tests/testthat/test-cran.R0000644000176200001440000000033213621311267016755 0ustar liggesusers context("CRAN") test_that("available_packages", { skip_on_cran() pkgs <- available_packages( repos = c(CRAN = "http://cran.rstudio.com"), type = "source" ) expect_true(inherits(pkgs, "matrix")) }) remotes/tests/testthat/test-script.R0000644000176200001440000000310413476462075017352 0ustar liggesusers context("install-github.R script") test_that("install-github.R script is up to date", { skip_on_cran() root <- system.file(package = packageName()) tmp 
<- test_temp_file(".R") withr::with_dir( rprojroot::find_package_root_file(), brew::brew(file.path(root, "install-github.Rin"), tmp)) expect_equal( readLines(tmp), readLines(file.path(root, "install-github.R"))) }) test_that("use install-github.R script", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() script <- system.file(package = packageName(), "install-github.R") lib <- test_temp_dir() expect_error( source(script)$value("cran/falsy", lib = lib, quiet = TRUE), NA) expect_equal( packageDescription("falsy", lib.loc = lib)$RemoteRepo, "falsy") }) test_that("install-github.R script does not load any package", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() script <- system.file(package = packageName(), "install-github.R") lib <- test_temp_dir() pkgs <- callr::r( function(script, lib) { ## compiler and tools are ok to load library(compiler) library(tools) orig <- loadedNamespaces() source(script)$value("cran/falsy", lib = lib) new <- loadedNamespaces() ## Need to load curl on R < 3.2.0, for HTTPS, so we ignore this if (getRversion() < "3.2.0") new <- setdiff(new, "curl") list(orig, new) }, args = list(script = script, lib = lib), cmdargs = c("--vanilla", "--slave", "--no-save", "--no-restore"), timeout = 20 ) expect_equal(pkgs[[1]], pkgs[[2]]) }) remotes/tests/testthat/test-package.R0000644000176200001440000000116313334615200017423 0ustar liggesuserscontext("package") test_that("load_pkg_description", { pkg <- load_pkg_description("noremotes") expect_equal(pkg$package, "noremotes") }) test_that("load_pkg_description tarball", { skip_on_cran() skip_if_offline() repos <- getOption("repos") if (length(repos) == 0) repos <- character() repos[repos == "@CRAN@"] <- "http://cran.rstudio.com" tmp_dir <- tempfile() dir.create(tmp_dir) on.exit(unlink(tmp_dir, recursive = TRUE)) out <- download.packages("pkgconfig", repos = repos, destdir = tmp_dir, quiet = TRUE)[[2]] pkg <- load_pkg_description(out) expect_equal(pkg$package, "pkgconfig") }) 
remotes/tests/testthat/test-install-remote.R0000644000176200001440000000320013453354776021005 0ustar liggesuserscontext("install-remotes.R") test_that("different_sha returns TRUE if remote or local sha is NA not found", { expect_true(different_sha(remote_sha = NA, local_sha = "4a2ea2")) expect_true(different_sha(remote_sha = "4a2ea2", local_sha = NA)) expect_true(different_sha(remote_sha = NA, local_sha = NA)) }) test_that("different_sha returns TRUE if remote_sha and local_sha are different", { expect_true(different_sha(remote_sha = "5b3fb3", local_sha = "4a2ea2")) }) test_that("different_sha returns FALSE if remote_sha and local_sha are the same", { expect_false(different_sha(remote_sha = "4a2ea2", local_sha = "4a2ea2")) }) test_that("local_sha returns NA if package is not installed", { expect_equal(local_sha("tsrtarst"), NA_character_) }) test_that("package2remotes looks for the DESCRIPTION in .libPaths", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() lib <- tempfile() dir.create(lib) expect_equal(package2remote("noremotes", lib = lib)$sha, NA_character_) # This is not a real package, so we can't actually build it install("noremotes", lib = lib, quiet = TRUE, build = FALSE, dependencies = FALSE, upgrade = FALSE, force = FALSE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), repos = getOption("repos"), type = getOption("pkgType")) expect_equal(package2remote("noremotes", lib = lib)$sha, "1.0.0") # Load the namespace, as packageDescription looks in loaded namespaces # first. 
withr::with_libpaths(lib, loadNamespace("noremotes") ) expect_equal(package2remote("noremotes")$sha, NA_character_) }) remotes/tests/testthat/submodule/0000755000176200001440000000000013621331470016731 5ustar liggesusersremotes/tests/testthat/submodule/NAMESPACE0000644000176200001440000000002413340057736020154 0ustar liggesusersexportPattern(".*") remotes/tests/testthat/submodule/DESCRIPTION0000644000176200001440000000027213340057736020450 0ustar liggesusersPackage: submodule Title: Foo Bar Description: Seriously. Foo Bar. Version: 1.0.0 Author: Gabor Csardi Maintainer: Gabor Csardi License: MIT + file LICENSE remotes/tests/testthat/test-parse-git.R0000644000176200001440000001130613340057736017736 0ustar liggesuserscontext("Parse git repo") test_that("pull request and latest release, via spec and URL", { expect_equal( parse_git_repo("r-lib/remotes#7")$ref, github_pull("7") ) expect_equal( parse_git_repo("https://github.com/r-lib/remotes/pull/7")$ref, github_pull("7") ) expect_equal( parse_git_repo("r-lib/remotes@*release")$ref, github_release() ) expect_equal( parse_git_repo("https://github.com/r-lib/remotes/releases/latest")$ref, github_release() ) }) test_that("parse_repo_spec trailing slash, issue #54", { expect_equal( parse_repo_spec("foo/bar/baz/"), parse_repo_spec("foo/bar/baz") ) }) test_that("parse_github_url() accepts all forms of URL (github.com and GHE)", { ## HTTPS expect_identical( parse_github_url("https://github.com/r-lib/remotes.git"), list(username = "r-lib", repo = "remotes", ref = "", pull = "", release = "") ) expect_identical( parse_github_url("https://github.ubc.ca/user/repo.git"), list(username = "user", repo = "repo", ref = "", pull = "", release = "") ) ## SSH expect_identical( parse_github_url("git@github.com:r-lib/remotes.git"), list(username = "r-lib", repo = "remotes", ref = "", pull = "", release = "") ) expect_identical( parse_github_url("git@github.ubc.ca:user/repo.git"), list(username = "user", repo = "repo", ref = "", pull = "", 
release = "") ) ## browser URLs expect_identical( parse_github_url("https://github.com/r-lib/remotes"), list(username = "r-lib", repo = "remotes", ref = "", pull = "", release = "") ) expect_identical( parse_github_url("https://github.ubc.ca/user/repo"), list(username = "user", repo = "repo", ref = "", pull = "", release = "") ) expect_identical( parse_github_url("https://github.com/r-lib/remotes/tree/i-am-a-branch"), list(username = "r-lib", repo = "remotes", ref = "i-am-a-branch", pull = "", release = "") ) expect_identical( parse_github_url("https://github.com/r-lib/remotes/commit/1234567"), list(username = "r-lib", repo = "remotes", ref = "1234567", pull = "", release = "") ) expect_identical( parse_github_url("https://github.com/r-lib/remotes/pull/108"), list(username = "r-lib", repo = "remotes", ref = "", pull = "108", release = "") ) expect_identical( parse_github_url("https://github.com/r-lib/remotes/releases/tag/1.0.0"), list(username = "r-lib", repo = "remotes", ref = "1.0.0", pull = "", release = "") ) expect_identical( parse_github_url("https://github.com/r-lib/remotes/releases/latest"), list(username = "r-lib", repo = "remotes", ref = "", pull = "", release = "*release") ) }) test_that("parse_repo_spec catches invalid spec", { expect_error( parse_repo_spec("/$&@R64&3"), "Invalid git repo specification" ) }) test_that("parse_repo_spec, github", { cases <- list( list("user/repo"), list("pkg=user/repo", package = "pkg"), list("pkg=user/repo", package = "pkg"), list("user/repo/subdir", subdir = "subdir"), list("user/repo@badcafe", ref = "badcafe"), list("user/repo#123", ref = github_pull("123")), list("user/repo@*release", ref = github_release()), list("pkg=user/repo/subdir", package = "pkg", subdir = "subdir"), list("pkg=user/repo@badcafe", package = "pkg", ref = "badcafe"), list("pkg=user/repo#123", package = "pkg", ref = github_pull("123")), list("pkg=user/repo@*release", package = "pkg", ref = github_release()), # github url cases 
list("git@github.com:user/repo.git"), list("git@github.ubc.ca:user/repo.git"), list("https://github.com/user/repo"), list("https://github.ubc.ca/user/repo"), list("https://github.com/user/repo/tree/i-am-a-branch", ref = "i-am-a-branch"), list("https://github.com/user/repo/commit/1234567", ref = "1234567"), list("https://github.com/user/repo/pull/108", ref = github_pull("108")), list("https://github.com/user/repo/releases/tag/1.0.0", ref = "1.0.0"), list("https://github.com/user/repo/releases/latest", ref = github_release()), list("https://github.com/user/repo/releases/latest", ref = github_release()), list("https://github.com/foo/bar", username = "foo", repo = "bar"), list("git@github.com:foo/bar.git", username = "foo", repo = "bar"), # Username and repo can have hyphens in them list("git@github.com:foo-bar/baz-qux.git", username = "foo-bar", repo = "baz-qux") ) for (case in cases) { expect_equal_named_lists( p <- parse_git_repo(case[[1]]), utils::modifyList( list(username = "user", repo = "repo"), case[-1] ) ) } }) test_that("parse_git_repo errors on invalid GitHub input", { expect_error(parse_git_repo("https://github.com/r-lib"), "Invalid GitHub URL") }) remotes/tests/testthat/test-git.R0000644000176200001440000000253213352235013016614 0ustar liggesusers context("Git") test_that("git_extract_sha1_tar", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() sha <- "fbae60ced0afee0e7c0f8dc3b5b1bb48d303f3dd" url <- build_url( "api.github.com/repos/hadley/devtools/tarball", sha ) tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) download(tmp, url, auth_token = github_pat()) expect_equal( git_extract_sha1_tar(tmp), sha ) }) test_that("git not quiet", { mockery::stub(git, "check_git_path", "/foo/git") mockery::stub(git, "system", "0") expect_message( git(args = c("arg1", "arg2"), quiet = FALSE), "['\"]/foo/git['\"] arg1arg2" ) }) test_that("git error", { mockery::stub(git, "check_git_path", "/foo/git") mockery::stub(git, "system", structure("foo", status = "1")) 
expect_error(git(args = "arg"), "Command failed") }) test_that("git_path", { tmp <- tempfile() expect_error( git_path(tmp), "does not exist" ) cat("Hello", file = tmp) expect_equal(git_path(tmp), tmp) mockery::stub(git_path, "Sys.which", "") mockery::stub(git_path, "os_type", "windows") mockery::stub(git_path, "file.exists", FALSE) expect_null(git_path()) }) test_that("check_git_path", { mockery::stub(check_git_path, "git_path", NULL) expect_error( check_git_path(), "Git does not seem to be installed on your system" ) }) remotes/tests/testthat/invalidpkg/0000755000176200001440000000000013337012264017063 5ustar liggesusersremotes/tests/testthat/invalidpkg/DESCRIPTION0000644000176200001440000000033113337012264020566 0ustar liggesusersPackage: invalidpkg Title: Tools to make developing R code easier License: MIT Description: Package description. Author: Bugs Bunny Maintainer: Bugs Bunny Version: 1.0.0 Suggests: testthat foobar remotes/tests/testthat/test-install-dev.R0000644000176200001440000000307713476462075020301 0ustar liggesuserscontext("test-install-dev") test_that("install_dev works with GitHub URLs", { skip_on_cran() skip_if_offline() mockery::stub(install_dev, "install_github", identity) expect_equal(install_dev("dplyr"), "tidyverse/dplyr") expect_equal(install_dev("reprex"), "tidyverse/reprex") expect_equal(install_dev("mongolite"), "jeroen/mongolite") # only has a GH URL in BugReports expect_equal(install_dev("digest"), "eddelbuettel/digest") }) test_that("install_dev works with uset CRAN mirrors", { skip_on_cran() skip_if_offline() mockery::stub(install_dev, "install_github", identity) expect_equal(install_dev("dplyr", cran_url = NULL), "tidyverse/dplyr") expect_equal(install_dev("dplyr", cran_url = "@CRAN@"), "tidyverse/dplyr") }) test_that("install_dev fails if there is no URL field", { skip_on_cran() skip_if_offline() expect_error(install_dev("primerTree"), "Could not determine development repository") }) test_that("install_dev fails if there is no URL 
field with a GitHub, GitLab or Bitbucket URL", { skip_on_cran() skip_if_offline() expect_error(install_dev("XML"), "Could not determine development repository") }) test_that("install_dev works with GitLab URLs", { skip_on_cran() skip_if_offline() mockery::stub(install_dev, "install_gitlab", identity) expect_equal(install_dev("iemiscdata"), "iembry/iemiscdata") }) test_that("install_dev works with Bitbucket URLs", { skip_on_cran() skip_if_offline() mockery::stub(install_dev, "install_bitbucket", identity) expect_equal(install_dev("argparser"), "djhshih/argparser") }) remotes/tests/testthat/helper.R0000644000176200001440000000303013476462075016346 0ustar liggesusers skip_if_offline <- function(host = "httpbin.org", port = 80) { res <- tryCatch( pingr::ping_port(host, count = 1L, port = port), error = function(e) NA ) if (is.na(res)) skip("No internet connection") } skip_if_over_rate_limit <- function(by = 50) { tmp <- tempfile() download( tmp, "https://api.github.com/rate_limit", auth_token = github_pat() ) res <- json$parse_file(tmp)$rate$remaining if (is.null(res) || res <= by) skip("Over the GitHub rate limit") } expect_equal_named_lists <- function(object, expected, ...) 
{ expect_true(!is.null(names(object)) && !is.null(names(expected))) expect_true(is.list(object) && is.list(expected)) object <- object[order(names(object))] expected <- expected[order(names(expected))] expect_equal(!!object, !!expected) } skip_without_package <- function(pkg) { if (!requireNamespace(pkg, quietly = TRUE)) { skip(paste("Need the", pkg, "package")) } } skip_without_program <- function(program) { if (Sys.which(program) == "") { skip(paste("Need the", program, "program")) } } test_temp_file <- function(fileext = "", pattern = "test-file-", envir = parent.frame()) { tmp <- tempfile(pattern = pattern, fileext = fileext) withr::defer( try(unlink(tmp, recursive = TRUE, force = TRUE), silent = TRUE), envir = envir) tmp } test_temp_dir <- function(pattern = "test-dir-", envir = parent.frame()) { tmp <- test_temp_file(pattern, envir = envir) dir.create(tmp, recursive = TRUE, showWarnings = FALSE) tmp } remotes/tests/testthat/test-install-bioc.R0000644000176200001440000000641013357643530020424 0ustar liggesuserscontext("install_bioc.R") test_that("bioc repo paths are parsed correctly", { expect_equal(parse_bioc_repo("devtools"), list(repo="devtools")) expect_equal(parse_bioc_repo("devtools#abc123"), list(repo="devtools", commit="abc123")) expect_equal(parse_bioc_repo("user:pass@devtools"), list(username = "user", password = "pass", repo="devtools")) expect_equal(parse_bioc_repo("devel/devtools"), list(release = "devel", repo="devtools")) expect_equal(parse_bioc_repo("3.1/devtools"), list(release = "3.1", repo="devtools")) expect_equal(parse_bioc_repo("release/devtools"), list(release = "release", repo="devtools")) expect_equal(parse_bioc_repo("user:pass@devtools#abc123"), list(username = "user", password = "pass", repo="devtools", commit = "abc123")) expect_error(parse_bioc_repo("user:pass@3.1/devtools#abc123"), "release and commit should not both be specified") expect_error(parse_bioc_repo("user@devtools"), "Invalid bioc repo") 
expect_error(parse_bioc_repo("user:@devtools"), "Invalid bioc repo") expect_error(parse_bioc_repo("@devtools"), "Invalid bioc repo") expect_error(parse_bioc_repo("devtools/"), "Invalid bioc repo") expect_error(parse_bioc_repo("junk/devtools"), "Invalid bioc repo") }) test_that("install_bioc with git2r", { skip_without_package("git2r") skip_on_cran() skip_if_offline() lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) mirror <- getOption("BioC_git", "https://git.bioconductor.org/packages") # This package has no dependencies or compiled code and is old install_bioc("MeasurementError.cor", mirror = mirror, git = "git2r", lib = lib, quiet = TRUE) expect_silent(packageDescription("MeasurementError.cor", lib.loc = lib)) expect_equal(packageDescription("MeasurementError.cor", lib.loc = lib)$RemoteType, "bioc_git2r") remote <- package2remote("MeasurementError.cor", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "bioc_git2r_remote") expect_equal(format(remote), "Bioc") expect_equal(remote$mirror, mirror) expect_equal(remote$repo, "MeasurementError.cor") expect_equal(remote$release, "release") expect_true(!is.na(remote$sha) && nzchar(remote$sha)) expect_true(!is.na(remote$branch) && nzchar(remote$branch)) }) test_that("install_bioc with xgit", { skip_without_program("git") skip_on_cran() skip_if_offline() lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) mirror <- getOption("BioC_git", "https://git.bioconductor.org/packages") # This package has no dependencies or compiled code and is old install_bioc("MeasurementError.cor", mirror = mirror, git = "external", lib = lib, quiet = TRUE) expect_silent(packageDescription("MeasurementError.cor", lib.loc = lib)) expect_equal(packageDescription("MeasurementError.cor", lib.loc = lib)$RemoteType, "bioc_xgit") remote <- package2remote("MeasurementError.cor", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "bioc_xgit_remote") 
expect_equal(format(remote), "Bioc") expect_equal(remote$mirror, mirror) expect_equal(remote$repo, "MeasurementError.cor") expect_equal(remote$release, "release") expect_true(!is.na(remote$sha) && nzchar(remote$sha)) expect_true(!is.na(remote$branch) && nzchar(remote$branch)) }) remotes/tests/testthat/test-install-bitbucket.R0000644000176200001440000000472413352235013021456 0ustar liggesusers context("Install from Bitbucket") test_that("", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_bitbucket("jimhester/withr", lib = lib, quiet = TRUE) expect_silent(packageDescription("withr", lib.loc = lib)) expect_equal( packageDescription("withr", lib.loc = lib)$RemoteRepo, "withr") remote <- package2remote("withr", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "bitbucket_remote") expect_equal(format(remote), "Bitbucket") expect_equal(remote$host, "api.bitbucket.org/2.0") expect_equal(remote$repo, "withr") expect_equal(remote$username, "jimhester") expect_equal(remote$ref, "master") expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("remote_download.bitbucket_remote", { x <- list(username = "csardigabor", repo = "pkgconfig", ref = "master", host = "api.bitbucket.org/2.0") mockery::stub( remote_download.bitbucket_remote, "download", function(...) { } ) mockery::stub( remote_download.bitbucket_remote, "bitbucket_download_url", function(...) 
{ } ) expect_message( remote_download.bitbucket_remote(x), "Downloading bitbucket repo csardigabor/pkgconfig@master" ) }) test_that("remote_metadata.bitbucket_remote", { expect_equal( remote_metadata.bitbucket_remote(list(), sha = "foobar")$RemoteSha, "foobar" ) }) test_that("bitbucket passwords", { if (Sys.getenv("BITBUCKET_PASSWORD") == "") { skip("Need BitBucket credentials") } skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) install_bitbucket("jimhester/falsy", lib = lib, quiet = TRUE) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal( packageDescription("falsy", lib.loc = lib)$RemoteRepo, "falsy") }) test_that("more bitbucket password", { x <- list( username = "username", repo = "repo", ref = "master", auth_user = "foo", password = "pass", host = "api.bitbucket.com/2.0" ) mockery::stub( remote_download.bitbucket_remote, "download", function(dest, src, basic_auth) basic_auth) mockery::stub( remote_download.bitbucket_remote, "bitbucket_download_url", function(...) 
{ }) expect_equal( remote_download.bitbucket_remote(x), list(user = "foo", password = "pass") ) }) remotes/tests/testthat/test-decompress.R0000644000176200001440000000547013476462061020215 0ustar liggesusers context("Decompress") test_that("decompress various file types", { types <- c("zip", "tar", "tar.gz", "tgz") for (type in types) { fname <- paste0("foo.", type) archive <- file.path("archives", fname) dec <- tempfile() decompress(archive, dec) expect_true( file.exists(file.path(dec, "foo", "R", "foo.R")), info = type ) } }) test_that("decompress with internal unzip", { types <- c("zip", "tar", "tar.gz", "tgz") for (type in types) { fname <- paste0("foo.", type) archive <- file.path("archives", fname) dec <- tempfile() on.exit(unlink(dec, recursive = TRUE), add = TRUE) mockery::stub( decompress, "getOption", function(x, default = NULL) { if (x == "unzip") { "internal" } else { if (missing(default) || x %in% names(options())) { options()[[x]] } else { default } } } ) decompress(archive, dec) expect_true( file.exists(file.path(dec, "foo", "R", "foo.R")), info = type ) } }) test_that("decompress errors on unknown file types", { tmp <- tempfile(fileext = ".foobar") on.exit(unlink(tmp, recursive = TRUE), add = TRUE) cat("surprise!", file = tmp) expect_error( decompress(tmp, tempdir()), "Don't know how to decompress" ) }) test_that("source_pkg", { foo_dir <- file.path("archives", "foo") expect_equal(source_pkg(foo_dir), foo_dir) bad_dir <- "archives" expect_error( source_pkg(bad_dir), "Does not appear to be an R package" ) foo_tgz <- file.path("archives", "foo.tar.gz") pkg_dir <- source_pkg(foo_tgz) on.exit(unlink(pkg_dir, recursive = TRUE), add = TRUE) expect_true(file.exists(file.path(pkg_dir, "R", "foo.R"))) expect_true(file.exists(file.path(pkg_dir, "configure"))) skip_on_os("windows") expect_match( as.character(file.info(file.path(pkg_dir, "configure"))$mode), "7.." 
) }) test_that("getrootdir", { cases <- list( list(c("foo/bar", "foo/"), "foo"), list(c("/foo/bar/baz", "/foo/bar"), "/foo"), list(c("this/foo/bar", "this/that"), "this"), list(c("", "yes"), ".") ) for (c in seq_along(cases)) { expect_identical(getrootdir(cases[[c]][[1]]), cases[[c]][[2]], info = c) } expect_error(getrootdir(character())) }) test_that("my_unzip respects options('unzip')", { mockery::stub(my_unzip, "utils::unzip", function(...) int <<- TRUE) mockery::stub(my_unzip, "system_check", function(...) int <<- FALSE) int <- NULL withr::with_options(c("unzip" = "internal"), my_unzip("blah", "tg")) expect_true(int) int <- NULL withr::with_options(c("unzip" = ""), my_unzip("blah", "tg")) expect_true(int) int <- NULL withr::with_options(c("unzip" = "somethingelse"), my_unzip("blah", "tg")) expect_false(int) }) remotes/tests/testthat/test-bioc.R0000644000176200001440000000652113621330205016745 0ustar liggesusers context("BioConductor packages") test_that("bioc is standalone", { ## baseenv() makes sure that the remotes package env is not used env <- new.env(parent = baseenv()) env$bioc <- bioconductor stenv <- env$bioc$.internal objs <- ls(stenv, all.names = TRUE) funs <- Filter(function(x) is.function(stenv[[x]]), objs) funobjs <- mget(funs, stenv) expect_message( mapply(codetools::checkUsage, funobjs, funs, MoreArgs = list(report = message)), NA) }) test_that("we can parse the YAML config", { skip_if_offline() expect_silent(yaml <- bioconductor$get_yaml_config(forget = TRUE)) expect_true(length(yaml) > 20) map <- bioconductor$get_version_map() expect_true("release" %in% map$bioc_status) expect_true("devel" %in% map$bioc_status) expect_true("future" %in% map$bioc_status) expect_true(inherits(bioconductor$get_release_version(), "package_version")) expect_true(inherits(bioconductor$get_devel_version(), "package_version")) }) test_that("internal map is current", { # If there is a new bioc version out, then we'll error. 
# This is to notify us that we need to update the package's # internal map. skip_on_cran() skip_if_offline() expect_equal( bioconductor$get_release_version(), package_version("3.10")) }) test_that("set of repos are correct", { # Compare our set of repos to the set returned by BiocManager. # They should match. If they don't we need to update the package. skip_if_offline() skip_on_cran() skip_without_package("BiocManager") withr::local_envvar(list(BIOC_VERSION = NA, BIOC_MIRROR = NA)) withr::local_options(list(Bioc_mirror = NULL)) # We can only run this if the matching R version is new enough, # so we skip on other platforms dev_ver <- bioconductor$get_devel_version() map <- bioconductor$get_version_map() required <- map$r_version[map$bioc_version == dev_ver] if (!getRversion()[, 1:2] %in% required) skip("Needs newer R version") my_repos <- bioconductor$get_repos(dev_ver) bm_repos <- asNamespace("BiocManager")$repositories(version = dev_ver) bm_repos <- bm_repos[names(bm_repos) != "CRAN"] expect_equal(sort(names(my_repos)), sort(names(bm_repos))) expect_equal(my_repos, bm_repos[names(my_repos)]) }) test_that("bioc_install_repos", { expect_equal( bioc_install_repos("3.1.0"), bioc_install_repos(bioc_ver = "3.0") ) expect_equal( bioc_install_repos("3.1.1"), bioc_install_repos(bioc_ver = "3.0") ) expect_equal( bioc_install_repos("3.2"), bioc_install_repos(bioc_ver = "3.2") ) expect_equal( bioc_install_repos("3.3"), bioc_install_repos(bioc_ver = "3.4") ) expect_equal( bioc_install_repos("3.4"), bioc_install_repos(bioc_ver = "3.6") ) expect_equal( bioc_install_repos("3.5"), bioc_install_repos(bioc_ver = "3.8") ) # This particular version needs to do a connection test for https support skip_on_cran() skip_if_offline() expect_equal( bioc_install_repos("3.2.2"), bioc_install_repos(bioc_ver = "3.2") ) }) test_that("CRANextras exists in versions prior to Bioc 3.6", { expect_equal( names(bioc_install_repos(bioc_ver = "3.5")), c("BioCsoft", "BioCann", "BioCexp", "BioCextra") ) 
expect_equal( names(bioc_install_repos(bioc_ver = "3.6")), c("BioCsoft", "BioCann", "BioCexp") ) }) remotes/tests/testthat/test-json.R0000644000176200001440000001116613476462075017026 0ustar liggesusers context("JSON parser") test_that("JSON is standalone", { ## baseenv() makes sure that the remotes package env is not used env <- new.env(parent = baseenv()) env$json <- json stenv <- env$json$.internal objs <- ls(stenv, all.names = TRUE) funs <- Filter(function(x) is.function(stenv[[x]]), objs) funobjs <- mget(funs, stenv) expect_message( mapply(codetools::checkUsage, funobjs, funs, MoreArgs = list(report = message)), NA) }) test_that("JSON parser scalars", { expect_equal(json$parse('"foobar"'), "foobar" ) expect_equal(json$parse('""'), "") expect_equal(json$parse("42"), 42) expect_equal(json$parse("-42"), -42) expect_equal(json$parse("42.42"), 42.42) expect_equal(json$parse("1e2"), 1e2) expect_equal(json$parse("-0.1e-2"), -0.1e-2) expect_equal(json$parse('null'), NULL) expect_equal(json$parse('true'), TRUE) expect_equal(json$parse('false'), FALSE) }) test_that("JSON parser arrays", { cases <- list( list("[1,2,3]", list(1,2,3)), list("[1]", list(1)), list("[]", list()), list('["foo"]', list("foo")), list('["foo", 1, "bar", true]', list("foo", 1, "bar", TRUE)) ) for (c in cases) { r <- json$parse(c[[1]]) expect_equal(r, c[[2]], info = c[[1]]) } }) test_that("JSON parser nested arrays", { cases <- list( list('[1,2, ["foo", "bar"], 3]', list(1,2, list("foo","bar"), 3)), list('[ [ [ 1 ] ] ]', list(list(list(1)))), list('[ [ [ ] ] ]', list(list(list()))) ) for (c in cases) { r <- json$parse(c[[1]]) expect_equal(r, c[[2]], info = c[[1]]) } }) test_that("JSON parser, real examples", { inp <- ' { "sha": "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", "commit": { "author": { "name": "Hadley Wickham", "email": "h.wickham@gmail.com", "date": "2015-03-30T13:55:18Z" }, "committer": { "name": "Hadley Wickham", "email": "h.wickham@gmail.com", "date": "2015-03-30T13:55:18Z" }, 
"message": "Merge pull request #22 from paulstaab/master\\n\\nImprove error message for assertions of length 0", "tree": { "sha": "f2e840b7a134fbc118597842992aa50048e0fa04", "url": "https://api.github.com/repos/hadley/assertthat/git/trees/f2e840b7a134fbc118597842992aa50048e0fa04" }, "url": "https://api.github.com/repos/hadley/assertthat/git/commits/e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", "comment_count": 0 } }' exp <- list( sha = "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", commit = list( author = list( name = "Hadley Wickham", email = "h.wickham@gmail.com", date = "2015-03-30T13:55:18Z"), committer = list( name = "Hadley Wickham", email = "h.wickham@gmail.com", date = "2015-03-30T13:55:18Z"), message = "Merge pull request #22 from paulstaab/master\\n\\nImprove error message for assertions of length 0", tree = list( sha = "f2e840b7a134fbc118597842992aa50048e0fa04", url = "https://api.github.com/repos/hadley/assertthat/git/trees/f2e840b7a134fbc118597842992aa50048e0fa04" ), url = "https://api.github.com/repos/hadley/assertthat/git/commits/e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", comment_count = 0 ) ) expect_equal(json$parse(inp), exp) }) test_that("JSON parser, errors", { expect_error( json$parse("[1,2,3,"), "EXPECTED value GOT EOF" ) expect_error( json$parse('{ 123: "foo" }'), "EXPECTED string GOT 123" ) expect_error( json$parse('{ "foo" "foobar" }'), 'EXPECTED : GOT "foobar"' ) expect_error( json$parse('{ "foo": "foobar" "foo2": "foobar2" }'), 'EXPECTED , or } GOT "foo2"' ) expect_error( json$parse('[1,2,3 4]'), 'EXPECTED , GOT 4' ) }) test_that("get_json_sha", { inp <- ' { "sha": "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", "author": { "name": "Hadley Wickham", "email": "h.wickham@gmail.com", "date": "2015-03-30T13:55:18Z" }, }' expect_identical( get_json_sha(inp), "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b") inp2 <- ' { "sha": "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", "author": { "name": "Hadley Wickham", "email": "h.wickham@gmail.com", "date": 
"2015-03-30T13:55:18Z" }, }' expect_identical( get_json_sha(inp2), "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b") inp3 <- ' { "nosha": "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", "author": { "name": "Hadley Wickham", "email": "h.wickham@gmail.com", "date": "2015-03-30T13:55:18Z" }, }' expect_identical( get_json_sha(inp3), NA_character_) }) remotes/tests/testthat/test-install-git.R0000644000176200001440000001254513503143725020273 0ustar liggesusers context("Install from git repo") test_that("install_git with git2r", { skip_on_cran() skip_if_offline() skip_if_not_installed("git2r") Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/gaborcsardi/pkgconfig.git" install_git(url, lib = lib, git = "git2r", quiet = TRUE) expect_silent(packageDescription("pkgconfig", lib.loc = lib)) expect_equal( packageDescription("pkgconfig", lib.loc = lib)$RemoteUrl, url ) remote <- package2remote("pkgconfig", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "git2r_remote") expect_equal(format(remote), "Git") expect_equal(remote$url, url) expect_equal(remote$ref, NULL) expect_equal(remote_sha(remote), remote$sha) expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("install_git with git2r and ref", { skip_on_cran() skip_if_offline() skip_if_over_rate_limit() skip_if_not_installed("git2r") Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/gaborcsardi/pkgconfig.git" install_git(url, lib = lib, ref = "travis", git = "git2r", quiet = TRUE) expect_silent(packageDescription("pkgconfig", lib.loc = lib)) expect_equal( packageDescription("pkgconfig", lib.loc = lib)$RemoteUrl, url ) remote <- package2remote("pkgconfig", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "git2r_remote") expect_equal(format(remote), "Git") expect_equal(remote$url, url) expect_equal(remote$ref, 
"travis") expect_equal(remote_sha(remote), remote$sha) expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("install_git with command line git", { skip_on_cran() skip_if_offline() if (is.null(git_path())) skip("git is not installed") Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/cran/falsy.git" install_git(url, git = "external", lib = lib, quiet = TRUE) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal(packageDescription("falsy", lib.loc = lib)$RemoteUrl, url) remote <- package2remote("falsy", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "xgit_remote") expect_equal(format(remote), "Git") expect_equal(remote$url, url) expect_equal(remote$ref, NULL) expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("install_git with command line git and tag ref", { skip_on_cran() skip_if_offline() if (is.null(git_path())) skip("git is not installed") Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/cran/falsy.git" install_git(url, ref = "1.0", git = "external", lib = lib, quiet = TRUE) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal(packageDescription("falsy", lib.loc = lib)$RemoteUrl, url) remote <- package2remote("falsy", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "xgit_remote") expect_equal(format(remote), "Git") expect_equal(remote$url, url) expect_equal(remote$ref, "1.0") expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("install_git with command line git and full SHA ref", { skip_on_cran() skip_if_offline() if (is.null(git_path())) skip("git is not installed") Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/cran/falsy.git" install_git(url, ref = 
"0f39d9eb735bf16909831c0bb129063dda388375", git = "external", lib = lib, quiet = TRUE) expect_silent(packageDescription("falsy", lib.loc = lib)) expect_equal(packageDescription("falsy", lib.loc = lib)$RemoteUrl, url) remote <- package2remote("falsy", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "xgit_remote") expect_equal(format(remote), "Git") expect_equal(remote$url, url) expect_equal(remote$ref, "0f39d9eb735bf16909831c0bb129063dda388375") expect_true(!is.na(remote$sha) && nzchar(remote$sha)) }) test_that("remote_sha.xgit remote returns the SHA if it exists", { skip_on_cran() skip_if_offline() if (is.null(git_path())) skip("git is not installed") url <- "https://github.com/cran/falsy.git" # works with tags remote <- git_remote(url, ref = "1.0", git = "external") expect_equal(remote_sha(remote), "0f39d9eb735bf16909831c0bb129063dda388375") # works with full SHAs remote <- git_remote(url, ref = "26a36cf957a18569e311ef75b6f61f822de945ef", git = "external") expect_equal(remote_sha(remote), "26a36cf957a18569e311ef75b6f61f822de945ef") }) test_that("remote_metadata.xgit_remote", { r <- remote_metadata.xgit_remote( list(url = "foo", subdir = "foo2", ref = "foo3") ) e <- list( RemoteType = "xgit", RemoteUrl = "foo", RemoteSubdir = "foo2", RemoteRef = "foo3", RemoteSha = NULL, RemoteArgs = NULL ) expect_equal(r, e) }) test_that("remote_metadata.git2r_remote", { r <- remote_metadata.git2r_remote( list(url = "foo", subdir = "foo2", ref = "foo3") ) e <- list( RemoteType = "git2r", RemoteUrl = "foo", RemoteSubdir = "foo2", RemoteRef = "foo3", RemoteSha = NULL ) expect_equal(r, e) }) remotes/tests/testthat/test-devel.R0000644000176200001440000000230313332603140017122 0ustar liggesuserscontext("devel") test_that("has_devel", { # All platforms when tests are run should have a compiler expect_true(has_devel()) # has_devel should return FALSE if an error occurs from has_devel2 mockery::stub( has_devel, "has_devel2", function(...) 
stop("failed")) expect_false(has_devel()) }) test_that("has_devel2", { # has_devel2 should error if an error occurs from R CMD SHLIB mockery::stub( has_devel2, "R", function(...) stop("failed")) expect_error(has_devel2()) }) test_that("missing_devel_warning", { mockery::stub( missing_devel_warning, "has_devel2", function(...) FALSE) expect_warning( missing_devel_warning("noremotes"), "has compiled code, but no suitable compiler") # Windows mockery::stub( missing_devel_warning, "sys_type", function() "windows") expect_warning( missing_devel_warning("noremotes"), "Install Rtools") # MacOS mockery::stub( missing_devel_warning, "sys_type", function() "macos") expect_warning( missing_devel_warning("noremotes"), "Install XCode") # Linux mockery::stub( missing_devel_warning, "sys_type", function() "linux") expect_warning( missing_devel_warning("noremotes"), "Install compilers") }) remotes/tests/testthat/test-utils.R0000644000176200001440000001132513476462075017212 0ustar liggesusers context("Utilities") test_that("%||%", { expect_equal(NULL %||% "foo", "foo") expect_equal("foo" %||% "bar", "foo") expect_equal(NULL %||% NULL, NULL) }) test_that("trim_ws", { expect_equal(trim_ws("foobar"), "foobar") expect_equal(trim_ws(" foobar"), "foobar") expect_equal(trim_ws(" foobar"), "foobar") expect_equal(trim_ws("foobar "), "foobar") expect_equal(trim_ws("foobar "), "foobar") expect_equal(trim_ws(" foobar "), "foobar") expect_equal(trim_ws(" foobar "), "foobar") expect_equal(trim_ws(character()), character()) expect_equal(trim_ws(c("1", "2")), c("1", "2")) expect_equal(trim_ws(c(" 1", "2")), c("1", "2")) expect_equal(trim_ws(c("1 ", "2")), c("1", "2")) expect_equal(trim_ws(c(" 1 ", " 2")), c("1", "2")) expect_equal(trim_ws(c("1", " 2 ")), c("1", "2")) expect_equal(trim_ws(c("1 ", "2 ")), c("1", "2")) expect_equal(trim_ws(c("1 ", " 2")), c("1", "2")) }) test_that("is_bioconductor", { D <- load_pkg_description(test_path("Biobase")) expect_true(is_bioconductor(D)) D <- 
load_pkg_description(test_path("MASS")) expect_false(is_bioconductor(D)) }) test_that("pkg_installed", { expect_true(pkg_installed("methods")) expect_false(pkg_installed("there-is-no-such-package")) if (pkg_installed("codetools")) { tryCatch( { unloadNamespace("codetools") expect_true(pkg_installed("codetools")) expect_false("codetools" %in% loadedNamespaces()) }, error = function(e) { } ) } }) test_that("in_dir", { tmp <- tempfile() dir.create(tmp) ## We need the basename, because of the symbolic links wd <- getwd() expect_equal( basename(in_dir(tmp, getwd())), basename(tmp) ) expect_equal(getwd(), wd) in_dir2 <- with_something(setwd) wd <- getwd() expect_equal( basename(in_dir2(tmp, getwd())), basename(tmp) ) expect_equal(getwd(), wd) }) # Adapted from https://github.com/gisle/mime-base64/blob/cf23d49e517c6ed8f4b24295f63721e8c9935010/t/base64.t test_that("base64_decode", { decode_tests <- c( 'YWE=' = 'aa', ' YWE=' = 'aa', 'Y WE=' = 'aa', 'YWE= ' = 'aa', "Y\nW\r\nE=" = 'aa', 'YWE=====' = 'aa', # extra padding 'YWE' = 'aa', # missing padding 'YWFh====' = 'aaa', 'YQ' = 'a', 'Y' = '', 'x==' = '' ) for (i in seq_along(decode_tests)) { encoded <- names(decode_tests)[[i]] expected <- decode_tests[[i]] decoded <- base64_decode(encoded) expect_equal(decoded, expected) } }) test_that("windows untar, --force-local errors", { do <- function(has, tar_result) { withr::local_envvar(c(TAR = "")) calls <- 0 mockery::stub(untar, "system2", if (has) "--force-local" else "nah") mockery::stub(untar, "os_type", "windows") mockery::stub(untar, "utils::untar", function(extras, ...) 
{ calls <<- calls + 1L if (grepl("force-local", extras)) tar_result() else "ok" }) expect_equal(untar("foobar"), "ok") expect_equal(calls, 1 + has) } ## Has force-local but tar fails with it do(TRUE, function() stop("failed")) do(TRUE, function() 1L) do(TRUE, function() structure("blah", status = 1L)) ## Does not have force-local do(FALSE, function() stop("failed")) do(FALSE, function() 1L) do(FALSE, function() structure("blah", status = 1L)) }) test_that("directories works", { expect_equal(directories("foo"), character()) expect_equal(directories("foo/bar"), "foo") expect_equal(sort(directories("foo/bar/baz")), sort(c("foo", "foo/bar"))) expect_equal(directories(c("foo/bar", "foo/baz")), "foo") expect_equal(sort(directories(c("foo/bar/baz", "foo2/1", "foo3/bar/3"))), sort(c("foo", "foo/bar", "foo2", "foo3", "foo3/bar"))) }) test_that("in_r_build_ignore works", { tf <- tempfile() on.exit(unlink(tf)) writeLines( c("^foo$", "^blah/xyz" ), tf) expect_equal( unname( in_r_build_ignore(c("foo/bar/baz", "R/test.R"), tf) ), c(TRUE, FALSE) ) expect_equal( unname( in_r_build_ignore(c("foo", "blah", "blah/abc", "blah/xyz", "R/test.R"), tf) ), c(TRUE, FALSE, FALSE, TRUE, FALSE) ) }) test_that("dev_split_ref works", { expect_equal(dev_split_ref("DT")[["pkg"]], "DT") expect_equal(dev_split_ref("remotes")[["ref"]], "") expect_equal(dev_split_ref("with.dot")[["pkg"]], "with.dot") expect_equal(dev_split_ref("with2")[["pkg"]], "with2") expect_equal(dev_split_ref("with@v1.2.1")[["ref"]], "@v1.2.1") expect_equal(dev_split_ref("with@v1.0.0.999")[["ref"]], "@v1.0.0.999") expect_equal(dev_split_ref("with@v1.0.0.999")[["pkg"]], "with") expect_equal(dev_split_ref("with#279")[["ref"]], "#279") expect_equal(dev_split_ref("with#1")[["pkg"]], "with") }) remotes/tests/testthat/test-dcf.R0000644000176200001440000000126513243326354016577 0ustar liggesusers context("DCF files") test_that("read_dcf and write_dcf", { DESC <- "Package: foobar Description: With continuation lines. Like . This one. . 
And this one. Title: Foo Bar " tmp <- tempfile() cat(DESC, file = tmp) dcf <- read_dcf(tmp) expect_equal(length(dcf), 3) expect_equal(names(dcf), c("Package", "Description", "Title")) tmp2 <- tempfile() on.exit(unlink(tmp2), add = TRUE) write_dcf(tmp2, dcf) DESC2 <- readChar(tmp2, nchars = file.info(tmp2)$size) ## This is a workaround for a write.dcf bug ## And also windows line ending characters DESC2 <- gsub("\r\n", "\n", DESC2) DESC2 <- gsub("\n .\n ", "\n .\n ", DESC2) expect_equal(DESC, DESC2) }) remotes/tests/testthat/github-error-travis.txt0000644000176200001440000000046113621330347021415 0ustar liggesusers[1] "HTTP error 304.\n foobar\n\n Rate limit remaining: 0/5000\n Rate limit reset at: 2018-10-19 15:16:18 UTC\n\n To increase your GitHub API rate limit\n - Use `usethis::browse_github_pat()` to create a Personal Access Token.\n - Add `GITHUB_PAT` to your travis settings as an encrypted variable." remotes/tests/testthat/noremotes/0000755000176200001440000000000013332603140016740 5ustar liggesusersremotes/tests/testthat/noremotes/DESCRIPTION0000644000176200001440000000031513332603140020445 0ustar liggesusersPackage: noremotes Title: Tools to make developing R code easier License: MIT Description: Package description. 
Author: Bugs Bunny Maintainer: Bugs Bunny Version: 1.0.0 Suggests: testthat remotes/tests/testthat/test-install-svn.R0000644000176200001440000000650213352235013020304 0ustar liggesusers context("Install from SVN repositories") test_that("install_svn", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/mangothecat/simplegraph/trunk" install_svn(url, lib = lib, quiet = TRUE) expect_silent(packageDescription("simplegraph", lib.loc = lib)) expect_equal( packageDescription("simplegraph", lib.loc = lib)$RemoteType, "svn") remote <- package2remote("simplegraph", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "svn_remote") expect_equal(format(remote), "SVN") expect_equal(remote$url, url) expect_equal(remote$svn_subdir, NULL) expect_true(!is.na(remote$revision) && nzchar(remote$revision)) expect_equal(remote$args, NULL) }) test_that("install_svn branch", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) url <- "https://github.com/mangothecat/simplegraph" install_svn( url, subdir = "branches/remotes-test", lib = lib, quiet = TRUE ) expect_silent(packageDescription("simplegraph", lib.loc = lib)) expect_equal( packageDescription("simplegraph", lib.loc = lib)$RemoteType, "svn") remote <- package2remote("simplegraph", lib = lib) expect_s3_class(remote, "remote") expect_s3_class(remote, "svn_remote") expect_equal(format(remote), "SVN") expect_equal(remote$url, url) expect_equal(remote$svn_subdir, "branches/remotes-test") expect_true(!is.na(remote$revision) && nzchar(remote$revision)) expect_equal(remote$args, NULL) }) test_that("install_svn subdir", { skip_on_cran() skip_if_offline() Sys.unsetenv("R_TESTS") lib <- tempfile() on.exit(unlink(lib, recursive = TRUE), add = TRUE) dir.create(lib) mockery::stub( install_svn, "install_remotes", 
function(remotes, ...) remotes) rem <- install_svn( "https://github.com/dmlc/xgboost/trunk", subdir = "R-package" ) expect_equal(rem[[1]]$svn_subdir, "R-package") }) test_that("remote_download.svn_remote error", { skip_on_cran() x <- list(url = "http://foo.bar.com") mockery::stub(remote_download.svn_remote, "system2", 1) expect_error( remote_download.svn_remote(x), "There seems to be a problem retrieving" ) }) test_that("downloading an SVN revision", { skip_on_cran() skip_if_offline() x <- list( url = "https://github.com/mangothecat/simplegraph/trunk", revision = "r28" ) bundle <- remote_download.svn_remote(x) on.exit(unlink(bundle), add = TRUE) expect_output( print(list.files(bundle)), "DESCRIPTION" ) }) test_that("downloading a wrong SVN revision", { skip_on_cran() skip_if_offline() x <- list( url = "https://github.com/mangothecat/simplegraph/trunk", revision = "xxx" ) expect_error( remote_download.svn_remote(x) ) }) test_that("svn_path", { tmp <- tempfile() expect_error( svn_path(tmp), "does not exist" ) cat("Hello", file = tmp) expect_equal(svn_path(tmp), tmp) mockery::stub(svn_path, "Sys.which", "") mockery::stub(svn_path, "os_type", "windows") mockery::stub(svn_path, "file.exists", FALSE) expect_error( svn_path(), "SVN does not seem to be installed on your system" ) }) remotes/tests/testthat/github-error-local.txt0000644000176200001440000000046113621330347021177 0ustar liggesusers[1] "HTTP error 304.\n foobar\n\n Rate limit remaining: 0/5000\n Rate limit reset at: 2018-10-19 15:16:18 UTC\n\n To increase your GitHub API rate limit\n - Use `usethis::browse_github_pat()` to create a Personal Access Token.\n - Use `usethis::edit_r_environ()` and add the token as `GITHUB_PAT`." 
remotes/tests/testthat/test-install.R0000644000176200001440000001465413621300305017503 0ustar liggesuserscontext("install") test_that("safe_build_package works with pkgbuild", { out <- tempfile() dir.create(out) on.exit(unlink(out, recursive = TRUE)) opts <- c("--no-resave-data", "--no-manual", "--no-build-vignettes") expect_equal( safe_build_package(test_path("noremotes"), build_opts = opts, build_manual = FALSE, build_vignettes = FALSE, out, quiet = TRUE, use_pkgbuild = TRUE), file.path(out, "noremotes_1.0.0.tar.gz")) }) test_that("safe_build_package works without pkgbuild", { out <- tempfile() dir.create(out) on.exit(unlink(out)) opts <- c("--no-resave-data", "--no-manual", "--no-build-vignettes") expect_equal( safe_build_package(test_path("noremotes"), build_opts = opts, build_manual = FALSE, build_vignettes = FALSE, out, quiet = TRUE, use_pkgbuild = FALSE), file.path(out, "noremotes_1.0.0.tar.gz")) }) test_that("safe_build_package fails appropriately with pkgbuild", { out <- tempfile() dir.create(out) on.exit(unlink(out, recursive = TRUE)) opts <- c("--no-resave-data", "--no-manual", "--no-build-vignettes") expect_error( safe_build_package(test_path("invalidpkg"), build_opts = opts, build_manual = FALSE, build_vignettes = FALSE, out, quiet = TRUE, use_pkgbuild = TRUE), "System command", class = "system_command_status_error" ) }) test_that("safe_build_package fails appropriately without pkgbuild", { out <- tempfile() dir.create(out) on.exit(unlink(out, recursive = TRUE)) opts <- c("--no-resave-data", "--no-manual", "--no-build-vignettes") capture.output( expect_error(fixed = TRUE, safe_build_package(test_path("invalidpkg"), build_opts = opts, build_manual = FALSE, build_vignettes = FALSE, out, quiet = TRUE, use_pkgbuild = FALSE), "Failed to `R CMD build` package" )) }) test_that("safe_build_package calls pkgbuild with appropriate arguments", { mockery::stub(safe_build_package, "pkgbuild::build", function(...) 
list(...)) expect_equal( safe_build_package( "foo", build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, "bar", quiet = TRUE, use_pkgbuild = TRUE), list("foo", dest_path = "bar", binary = FALSE, vignettes = FALSE, manual = FALSE, args = "--no-resave-data", quiet = TRUE) ) expect_equal( safe_build_package( "foo", build_opts = c("--no-resave-data", "--no-build-vignettes"), build_manual = TRUE, build_vignettes = FALSE, "bar", quiet = TRUE, use_pkgbuild = TRUE), list("foo", dest_path = "bar", binary = FALSE, vignettes = FALSE, manual = TRUE, args = "--no-resave-data", quiet = TRUE) ) expect_equal( safe_build_package( "foo", build_opts = c("--no-resave-data"), build_manual = TRUE, build_vignettes = TRUE, "bar", quiet = TRUE, use_pkgbuild = TRUE), list("foo", dest_path = "bar", binary = FALSE, vignettes = TRUE, manual = TRUE, args = "--no-resave-data", quiet = TRUE) ) }) test_that("should_error_for_warnings works", { # If both unset, should error -> TRUE withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = NA, "_R_CHECK_FORCE_SUGGESTS_" = NA), expect_true(should_error_for_warnings()) ) # If no errors true, should error -> FALSE withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = "true", "_R_CHECK_FORCE_SUGGESTS_" = NA), expect_false(should_error_for_warnings()) ) withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = "1", "_R_CHECK_FORCE_SUGGESTS_" = NA), expect_false(should_error_for_warnings()) ) # If no errors unset, and force_suggests false, should error -> FALSE withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = NA, "_R_CHECK_FORCE_SUGGESTS_" = "false"), expect_false(should_error_for_warnings()) ) withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = NA, "_R_CHECK_FORCE_SUGGESTS_" = "0"), expect_false(should_error_for_warnings()) ) # If no errors unset, and force_suggests true, should error -> TRUE withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = NA, 
"_R_CHECK_FORCE_SUGGESTS_" = "true"), expect_true(should_error_for_warnings()) ) withr::with_envvar(c("R_REMOTES_NO_ERRORS_FROM_WARNINGS" = NA, "_R_CHECK_FORCE_SUGGESTS_" = "1"), expect_true(should_error_for_warnings()) ) }) test_that("normalize_build_opts works", { build_opts <- c("--no-resave-data", "--no-manual", "--no-build-vignettes") # If build_vignettes = TRUE, should not include --no-build-vignettes expect_equal( normalize_build_opts(character(), build_vignettes = TRUE, build_manual = TRUE), character() ) expect_equal( normalize_build_opts("--no-build-vignettes", build_vignettes = TRUE, build_manual = TRUE), character() ) # If build_vignettes = FALSE, should always include --no-build-vignettes expect_equal( normalize_build_opts(character(), build_vignettes = FALSE, build_manual = TRUE), "--no-build-vignettes" ) expect_equal( normalize_build_opts("--no-build-vignettes", build_vignettes = FALSE, build_manual = TRUE), "--no-build-vignettes" ) # If build_manual = TRUE, should not include --no-manual expect_equal( normalize_build_opts(character(), build_vignettes = TRUE, build_manual = TRUE), character() ) expect_equal( normalize_build_opts("--no-manual", build_vignettes = TRUE, build_manual = TRUE), character() ) # If build_manual = FALSE, should always include --no-build-manual expect_equal( normalize_build_opts(character(), build_vignettes = TRUE, build_manual = FALSE), "--no-manual" ) expect_equal( normalize_build_opts("--no-manual", build_vignettes = TRUE, build_manual = FALSE), "--no-manual" ) # Other arguments are ignored regardless of build_manual and build_vignettes expect_equal( normalize_build_opts(build_opts, build_vignettes = FALSE, build_manual = FALSE), c("--no-resave-data", "--no-manual", "--no-build-vignettes") ) expect_equal( normalize_build_opts(build_opts, build_vignettes = TRUE, build_manual = FALSE), c("--no-resave-data", "--no-manual") ) expect_equal( normalize_build_opts(build_opts, build_vignettes = FALSE, build_manual = TRUE), 
c("--no-resave-data", "--no-build-vignettes") ) expect_equal( normalize_build_opts(build_opts, build_vignettes = TRUE, build_manual = TRUE), c("--no-resave-data") ) }) remotes/tests/testthat/test-package-deps.R0000644000176200001440000000453613476462075020404 0ustar liggesusers context("Package dependencies") test_that("parse_deps", { expect_null(parse_deps(NULL)) expect_null(parse_deps("")) expect_null(parse_deps(" ")) expect_null(parse_deps("\n")) expect_equal( parse_deps("devtools (>= 1.0.1)"), structure( list( name = "devtools", compare = ">=", version = "1.0.1"), row.names = 1L, class = "data.frame" ) ) expect_equal( parse_deps("devtools (>= 1.0.1), foobar, foobar2 (== 0.0.1)"), structure( list( name = c("devtools", "foobar", "foobar2"), compare = c(">=", NA, "=="), version = c("1.0.1", NA, "0.0.1")), row.names = 1:3, class = "data.frame" ) ) # Whitespace should be ignored expect_equal( parse_deps("devtools (>= 1.0.1) \n, foobar, foobar2 ( == 0.0.1 )"), structure( list( name = c("devtools", "foobar", "foobar2"), compare = c(">=", NA, "=="), version = c("1.0.1", NA, "0.0.1")), row.names = 1:3, class = "data.frame" ) ) expect_equal( parse_deps("package (>= 1.0.1) , package2 (< 0.0.1 ) "), structure( list( name = c("package", "package2"), compare = c(">=", "<"), version = c("1.0.1", "0.0.1")), row.names = 1:2, class = "data.frame" ) ) }) test_that("parse_deps errors", { expect_error(parse_deps(42), "is.character.*is not TRUE") expect_error( parse_deps("remotes (++ 1.0.0)"), "Invalid comparison operator in dependency" ) expect_error( parse_deps("remotes (>=1.0.0)"), "Invalid comparison operator in dependency" ) }) test_that("parse_deps omits R", { expect_equal( parse_deps("R (>= 2.15.3)"), structure( list( name = character(0), compare = character(0), version = character(0)), row.names = integer(0), class = "data.frame" ) ) expect_equal( parse_deps("R (>= 2.15.3), devtools (>= 1.0.1)"), structure( list( name = "devtools", compare = ">=", version = "1.0.1"), 
row.names = 2L, class = "data.frame" ) ) expect_equal( parse_deps("devtools (>= 1.0.1), R (>= 2.15.3)"), structure( list( name = "devtools", compare = ">=", version = "1.0.1"), row.names = 1L, class = "data.frame" ) ) }) remotes/tests/testthat/test-system.R0000644000176200001440000000063613243326354017370 0ustar liggesusers context("System commands") test_that("system_check", { mockery::stub(system_check, "system2", structure("output", status = 1)) expect_error( system_check("foobar", args = c("arg1", "arg2", quiet = TRUE)), "Command foobar failed" ) mockery::stub(system_check, "system2", 42) expect_error( system_check("foobar", args = c("arg1", "arg2", quiet = FALSE)), "Command foobar failed" ) }) remotes/tests/testthat.R0000644000176200001440000000007213243326354015061 0ustar liggesuserslibrary(testthat) library(remotes) test_check("remotes") remotes/vignettes/0000755000176200001440000000000013621331470013740 5ustar liggesusersremotes/vignettes/dependencies.Rmd0000644000176200001440000000532113362420362017034 0ustar liggesusers--- title: "Dependency resolution for R package development" author: "Jim Hester, Hadley Wickham, Gábor Csárdi" date: "`r Sys.Date()`" output: rmarkdown::html_vignette: keep_md: true vignette: > %\VignetteIndexEntry{Dependency resolution for R package development} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- # Package remotes Remotes, just like devtools, supports package dependency installation for packages not yet in a standard package repository such as [CRAN](https://cran.r-project.org) or [Bioconductor](http://bioconductor.org). You can mark any regular dependency defined in the `Depends`, `Imports`, `Suggests` or `Enhances` fields as being installed from a remote location by adding the remote location to `Remotes` in your `DESCRIPTION` file. This will cause remotes to download and install them from the specified location, instead of CRAN. 
The remote dependencies specified in `Remotes` should be described in the following form. ``` Remotes: [type::], [type2::] ``` The `type` is an optional parameter. If the type is missing the default is to install from GitHub. Additional remote dependencies should be separated by commas, just like normal dependencies elsewhere in the `DESCRIPTION` file. ### GitHub Because GitHub is the most commonly used unofficial package distribution in R, it's the default: ```yaml Remotes: r-lib/testthat ``` You can also specify a specific hash, tag, or pull request (using the same syntax as `install_github()` if you want a particular commit. Otherwise the latest commit on the master branch is used. ```yaml Remotes: r-lib/httr@v0.4, klutometis/roxygen#142, r-lib/testthat@c67018fa4970 ``` The special `@*release` syntax will install the latest release: ```yaml Remotes: r-lib/testthat@*release ``` A type of 'github' can be specified, but is not required ```yaml Remotes: github::tidyverse/ggplot2 ``` ### Other sources All of the currently supported install sources are available, see the 'See Also' section in `?install_github` for a complete list. ```yaml # GitLab Remotes: gitlab::jimhester/covr # Git Remotes: git::git@bitbucket.org:dannavarro/lsr-package.git # Bitbucket Remotes: bitbucket::sulab/mygene.r@default, dannavarro/lsr-package # Bioconductor Remotes: bioc::3.3/SummarizedExperiment#117513, bioc::release/Biobase # SVN Remotes: svn::https://github.com/tidyverse/stringr # URL Remotes: url::https://github.com/tidyverse/stringr/archive/master.zip # Local Remotes: local::/pkgs/testthat ``` ### CRAN submission When you submit your package to CRAN, all of its dependencies must also be available on CRAN. For this reason, `devtools::release()` will warn you if you try to release a package with a `Remotes` field. 
remotes/R/0000755000176200001440000000000013621330472012132 5ustar liggesusersremotes/R/package-deps.R0000644000176200001440000000221013476462075014611 0ustar liggesusers parse_deps <- function(string) { if (is.null(string)) return() stopifnot(is.character(string), length(string) == 1) if (grepl("^\\s*$", string)) return() # Split by commas with surrounding whitespace removed pieces <- strsplit(string, "[[:space:]]*,[[:space:]]*")[[1]] # Get the names names <- gsub("\\s*\\(.*?\\)", "", pieces) names <- gsub("^\\s+|\\s+$", "", names) # Get the versions and comparison operators versions_str <- pieces have_version <- grepl("\\(.*\\)", versions_str) versions_str[!have_version] <- NA compare <- sub(".*\\(\\s*(\\S+)\\s+.*\\s*\\).*", "\\1", versions_str) versions <- sub(".*\\(\\s*\\S+\\s+(\\S*)\\s*\\).*", "\\1", versions_str) # Check that non-NA comparison operators are valid compare_nna <- compare[!is.na(compare)] compare_valid <- compare_nna %in% c(">", ">=", "==", "<=", "<") if(!all(compare_valid)) { stop("Invalid comparison operator in dependency: ", paste(compare_nna[!compare_valid], collapse = ", ")) } deps <- data.frame(name = names, compare = compare, version = versions, stringsAsFactors = FALSE) # Remove R dependency deps[names != "R", ] } remotes/R/utils.R0000644000176200001440000003146713503160125013423 0ustar liggesusers `%||%` <- function (a, b) if (!is.null(a)) a else b `%:::%` <- function (p, f) get(f, envir = asNamespace(p)) `%::%` <- function (p, f) get(f, envir = asNamespace(p)) viapply <- function(X, FUN, ..., USE.NAMES = TRUE) { vapply(X, FUN, integer(1L), ..., USE.NAMES = USE.NAMES) } vlapply <- function(X, FUN, ..., USE.NAMES = TRUE) { vapply(X, FUN, logical(1L), ..., USE.NAMES = USE.NAMES) } rcmd <- function(cmd, args, path = R.home("bin"), quiet, fail_on_status = TRUE) { if (os_type() == "windows") { real_cmd <- file.path(path, "Rcmd.exe") args <- c(cmd, args) } else { real_cmd <- file.path(path, "R") args <- c("CMD", cmd, args) } stdoutfile <- 
tempfile() stderrfile <- tempfile() on.exit(unlink(c(stdoutfile, stderrfile), recursive = TRUE), add = TRUE) status <- system2(real_cmd, args, stderr = stderrfile, stdout = stdoutfile) out <- tryCatch(readLines(stdoutfile, warn = FALSE), error = function(x) "") err <- tryCatch(readLines(stderrfile, warn = FALSE), error = function(x) "") if (fail_on_status && status != 0) { cat("STDOUT:\n") cat(out, sep = "\n") cat("STDERR:\n") cat(err, sep = "\n") stop(sprintf("Error running '%s' (status '%i')", cmd, status), call. = FALSE) } if (!quiet) { cat(out, sep = "\n") } list(stdout = out, stderr = err, status = status) } is_bioconductor <- function(x) { !is.null(x$biocviews) } trim_ws <- function(x) { gsub("^[[:space:]]+|[[:space:]]+$", "", x) } set_envvar <- function(envs) { if (length(envs) == 0) return() stopifnot(is.named(envs)) old <- Sys.getenv(names(envs), names = TRUE, unset = NA) set <- !is.na(envs) both_set <- set & !is.na(old) if (any(set)) do.call("Sys.setenv", as.list(envs[set])) if (any(!set)) Sys.unsetenv(names(envs)[!set]) invisible(old) } with_envvar <- function(new, code) { old <- set_envvar(new) on.exit(set_envvar(old)) force(code) } is.named <- function(x) { !is.null(names(x)) && all(names(x) != "") } pkg_installed <- function(pkg) { if (pkg %in% loadedNamespaces()) { TRUE } else if (requireNamespace(pkg, quietly = TRUE)) { try(unloadNamespace(pkg)) TRUE } else { FALSE } } has_package <- function(pkg) { if (pkg %in% loadedNamespaces()) { TRUE } else { requireNamespace(pkg, quietly = TRUE) } } with_something <- function(set, reset = set) { function(new, code) { old <- set(new) on.exit(reset(old)) force(code) } } in_dir <- with_something(setwd) get_r_version <- function() { paste(R.version$major, sep = ".", R.version$minor) } set_options <- function(x) { do.call(options, as.list(x)) } with_options <- with_something(set_options) # Read the current user .Rprofile. 
Here is the order it is searched, from # ?Startup # # 'R_PROFILE_USER’ environment variable (and tilde expansion # will be performed). If this is unset, a file called ‘.Rprofile’ # is searched for in the current directory or in the user's home # directory (in that order). The user profile file is sourced into # the workspace. read_rprofile_user <- function() { f <- normalizePath(Sys.getenv("R_PROFILE_USER", ""), mustWork = FALSE) if (file.exists(f)) { return(readLines(f)) } f <- normalizePath("~/.Rprofile", mustWork = FALSE) if (file.exists(f)) { return(readLines(f)) } character() } with_rprofile_user <- function(new, code) { temp_rprofile <- tempfile() on.exit(unlink(temp_rprofile), add = TRUE) writeLines(c(read_rprofile_user(), new), temp_rprofile) with_envvar(c("R_PROFILE_USER" = temp_rprofile), { force(code) }) } ## There are two kinds of tar on windows, one needs --force-local ## not to interpret : characters, the other does not. We try both ways. untar <- function(tarfile, ...) { if (os_type() == "windows") { tarhelp <- tryCatch( system2("tar", "--help", stdout = TRUE, stderr = TRUE), error = function(x) "") if (any(grepl("--force-local", tarhelp))) { status <- try( suppressWarnings(utils::untar(tarfile, extras = "--force-local", ...)), silent = TRUE) if (! is_tar_error(status)) { return(status) } else { message("External tar failed with `--force-local`, trying without") } } } utils::untar(tarfile, ...) 
} is_tar_error <- function(status) { inherits(status, "try-error") || is_error_status(status) || is_error_status(attr(status, "status")) } is_error_status <- function(x) { is.numeric(x) && length(x) > 0 && !is.na(x) && x != 0 } os_type <- function() { .Platform$OS.type } sys_type <- function() { if (.Platform$OS.type == "windows") { "windows" } else if (Sys.info()["sysname"] == "Darwin") { "macos" } else if (Sys.info()["sysname"] == "Linux") { "linux" } else if (.Platform$OS.type == "unix") { "unix" } else { stop("Unknown OS") } } is_dir <- function(path) { file.info(path)$isdir } untar_description <- function(tarball, dir = tempfile()) { files <- untar(tarball, list = TRUE) desc <- grep("^[^/]+/DESCRIPTION$", files, value = TRUE) if (length(desc) < 1) stop("No 'DESCRIPTION' file in package") untar(tarball, desc, exdir = dir) file.path(dir, desc) } ## copied from rematch2@180fb61 re_match <- function(text, pattern, perl = TRUE, ...) { stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern)) text <- as.character(text) match <- regexpr(pattern, text, perl = perl, ...) 
start <- as.vector(match) length <- attr(match, "match.length") end <- start + length - 1L matchstr <- substring(text, start, end) matchstr[ start == -1 ] <- NA_character_ res <- data.frame( stringsAsFactors = FALSE, .text = text, .match = matchstr ) if (!is.null(attr(match, "capture.start"))) { gstart <- attr(match, "capture.start") glength <- attr(match, "capture.length") gend <- gstart + glength - 1L groupstr <- substring(text, gstart, gend) groupstr[ gstart == -1 ] <- NA_character_ dim(groupstr) <- dim(gstart) res <- cbind(groupstr, res, stringsAsFactors = FALSE) } names(res) <- c(attr(match, "capture.names"), ".text", ".match") class(res) <- c("tbl_df", "tbl", class(res)) res } is_standalone <- function() { isTRUE(config_val_to_logical(Sys.getenv("R_REMOTES_STANDALONE", "false"))) } # This code is adapted from the perl MIME::Base64 module https://perldoc.perl.org/MIME/Base64.html # https://github.com/gisle/mime-base64/blob/cf23d49e517c6ed8f4b24295f63721e8c9935010/Base64.xs#L197 XX <- 255L EQ <- 254L INVALID <- XX index_64 <- as.integer(c( XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,62, XX,XX,XX,63, 52,53,54,55, 56,57,58,59, 60,61,XX,XX, XX,EQ,XX,XX, XX, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14, 15,16,17,18, 19,20,21,22, 23,24,25,XX, XX,XX,XX,XX, XX,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40, 41,42,43,44, 45,46,47,48, 49,50,51,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX )) base64_decode <- function(x) { if (is.character(x)) { x <- charToRaw(x) } len <- length(x) idx <- 1 c <- 
integer(4) out <- raw() while(idx <= len) { i <- 1 while(i <= 4) { uc <- index_64[[as.integer(x[[idx]]) + 1L]] idx <- idx + 1 if (uc != INVALID) { c[[i]] <- uc i <- i + 1 } if (idx > len) { if (i <= 4) { if (i <= 2) return(rawToChar(out)) if (i == 3) { c[[3]] <- EQ c[[4]] <- EQ } break } } } if (c[[1]] == EQ || c[[2]] == EQ) { break } #print(sprintf("c1=%d,c2=%d,c3=%d,c4=%d\n", c[1],c[2],c[3],c[4])) out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(c[[1]], 2L), bitwShiftR(bitwAnd(c[[2]], 0x30), 4L))) if (c[[3]] == EQ) { break } out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(bitwAnd(c[[2]], 0x0F), 4L), bitwShiftR(bitwAnd(c[[3]], 0x3C), 2L))) if (c[[4]] == EQ) { break } out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(bitwAnd(c[[3]], 0x03), 6L), c[[4]])) } rawToChar(out) } basis64 <- charToRaw(paste(c(LETTERS, letters, 0:9, "+", "/"), collapse = "")) base64_encode <- function(x) { if (is.character(x)) { x <- charToRaw(x) } len <- length(x) rlen <- floor((len + 2L) / 3L) * 4L out <- raw(rlen) ip <- op <- 1L c <- integer(4) while (len > 0L) { c[[1]] <- as.integer(x[[ip]]) ip <- ip + 1L if (len > 1L) { c[[2]] <- as.integer(x[ip]) ip <- ip + 1L } else { c[[2]] <- 0L } out[op] <- basis64[1 + bitwShiftR(c[[1]], 2L)] op <- op + 1L out[op] <- basis64[1 + bitwOr(bitwShiftL(bitwAnd(c[[1]], 3L), 4L), bitwShiftR(bitwAnd(c[[2]], 240L), 4L))] op <- op + 1L if (len > 2) { c[[3]] <- as.integer(x[ip]) ip <- ip + 1L out[op] <- basis64[1 + bitwOr(bitwShiftL(bitwAnd(c[[2]], 15L), 2L), bitwShiftR(bitwAnd(c[[3]], 192L), 6L))] op <- op + 1L out[op] <- basis64[1 + bitwAnd(c[[3]], 63)] op <- op + 1L } else if (len == 2) { out[op] <- basis64[1 + bitwShiftL(bitwAnd(c[[2]], 15L), 2L)] op <- op + 1L out[op] <- charToRaw("=") op <- op + 1L } else { ## len == 1 out[op] <- charToRaw("=") op <- op + 1L out[op] <- charToRaw("=") op <- op + 1L } len <- len - 3L } rawToChar(out) } build_url <- function(host, ...) 
{ download_url(do.call(file.path, as.list(c(host, ...)))) } download_url <- function(url) { if (!grepl("^[[:alpha:]]+://", url)) { scheme <- if (download_method_secure()) "https://" else "http://" return(paste0(scheme, url)) } url } is_na <- function(x) { length(x) == 1 && is.na(x) } dir.exists <- function(paths) { if (getRversion() < "3.2") { x <- base::file.info(paths)$isdir !is.na(x) & x } else { ("base" %::% "dir.exists")(paths) } } is_binary_pkg <- function(x) { file_ext(x) %in% c("tgz", "zip") } format_str <- function(x, width = Inf, trim = TRUE, justify = "none", ...) { x <- format(x, trim = trim, justify = justify, ...) if (width < Inf) { x_width <- nchar(x, "width") too_wide <- x_width > width if (any(too_wide)) { x[too_wide] <- paste0(substr(x[too_wide], 1, width - 3), "...") } } x } warn_for_potential_errors <- function() { if (sys_type() == "windows" && grepl(" ", R.home()) && getRversion() <= "3.4.2") { warning(immediate. = TRUE, "\n!!! Installation will probably fail!\n", "This version of R has trouble with building and installing packages if\n", "the R HOME directory (currently '", R.home(), "')\n", "has space characters. 
Possible workarounds include:\n", "- installing R to the C: drive,\n", "- installing it into a path without a space, or\n", "- creating a drive letter for R HOME via the `subst` windows command, and\n", " starting R from the new drive.\n", "See also https://github.com/r-lib/remotes/issues/98\n") } } # Return all directories in the input paths directories <- function(paths) { dirs <- unique(dirname(paths)) out <- dirs[dirs != "."] while(length(dirs) > 0 && any(dirs != ".")) { out <- unique(c(out, dirs[dirs != "."])) dirs <- unique(dirname(dirs)) } sort(out) } in_r_build_ignore <- function(paths, ignore_file) { ignore <- ("tools" %:::% "get_exclude_patterns")() if (file.exists(ignore_file)) { ignore <- c(ignore, readLines(ignore_file, warn = FALSE)) } matches_ignores <- function(x) { any(vlapply(ignore, grepl, x, perl = TRUE, ignore.case = TRUE)) } # We need to search for the paths as well as directories in the path, so # `^foo$` matches `foo/bar` should_ignore <- function(path) { any(vlapply(c(path, directories(path)), matches_ignores)) } vlapply(paths, should_ignore) } dev_split_ref <- function(x) { re_match(x, "^(?[^@#]+)(?[@#].*)?$") } get_json_sha <- function(text) { m <- regexpr(paste0('"sha"\\s*:\\s*"(\\w+)"'), text, perl = TRUE) if (all(m == -1)) { return(json$parse(text)$sha %||% NA_character_) } start <- attr(m, "capture.start") end <- start + attr(m, "capture.length") - 1L substring(text, start, end) } # from tools:::config_val_to_logical config_val_to_logical <- function (val) { v <- tolower(val) if (v %in% c("1", "yes", "true")) TRUE else if (v %in% c("0", "no", "false")) FALSE else { NA } } remotes/R/install-url.R0000644000176200001440000000471113476462151014536 0ustar liggesusers #' Install a package from a url #' #' This function is vectorised so you can install multiple packages in #' a single command. #' #' @param url location of package on internet. The url should point to a #' zip file, a tar file or a bzipped/gzipped tar file. 
#' @param subdir subdirectory within url bundle that contains the R package.
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams install_github
#' @export
#'
#' @family package installation
#' @examples
#' \dontrun{
#' install_url("https://github.com/hadley/stringr/archive/master.zip")
#' }
install_url <- function(url, subdir = NULL,
                        dependencies = NA,
                        upgrade = c("default", "ask", "always", "never"),
                        force = FALSE,
                        quiet = FALSE,
                        build = TRUE,
                        build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                        build_manual = FALSE, build_vignettes = FALSE,
                        repos = getOption("repos"),
                        type = getOption("pkgType"),
                        ...) {
  # One remote spec per url, then hand the whole batch to the shared
  # installation driver.
  specs <- lapply(url, url_remote, subdir = subdir)
  install_remotes(specs,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Construct a `url_remote` object describing a package located at `url`.
url_remote <- function(url, subdir = NULL, ...) {
  remote("url",
    url = url,
    subdir = subdir
  )
}

#' @importFrom tools file_ext
#' @export
remote_download.url_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading package from url: ", x$url) # nocov
  }

  # `file_ext()` would report only "gz" for a ".tar.gz" url, so that case
  # is handled explicitly.
  if (grepl("\\.tar\\.gz$", x$url)) {
    ext <- "tar.gz"
  } else {
    ext <- file_ext(x$url)
  }

  bundle <- tempfile(fileext = paste0(".", ext))
  download(bundle, x$url)
}

#' @export
remote_metadata.url_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  list(
    RemoteType = "url",
    RemoteUrl = x$url,
    RemoteSubdir = x$subdir
  )
}

#' @export
remote_package_name.url_remote <- function(remote, ...) {
  # A bare url carries no package-name information.
  NA_character_
}

#' @export
remote_sha.url_remote <- function(remote, ...) {
  # A plain url has no revision identifier.
  NA_character_
}

#' @export
format.url_remote <- function(x, ...) {
  "URL"
}
remotes/R/install.R0000644000176200001440000001530513577155371013734 0ustar liggesusersinstall <- function(pkgdir, dependencies, quiet, build, build_opts, build_manual, build_vignettes, upgrade, repos, type, ...)
{ warn_for_potential_errors() if (file.exists(file.path(pkgdir, "src"))) { if (has_package("pkgbuild")) { pkgbuild::local_build_tools(required = TRUE) } else if (!has_devel()) { missing_devel_warning(pkgdir) } } pkg_name <- load_pkg_description(pkgdir)$package ## Check for circular dependencies. We need to know about the root ## of the install process. if (is_root_install()) on.exit(exit_from_root_install(), add = TRUE) if (check_for_circular_dependencies(pkgdir, quiet)) { return(invisible(pkg_name)) } install_deps(pkgdir, dependencies = dependencies, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, upgrade = upgrade, repos = repos, type = type, ...) if (isTRUE(build)) { dir <- tempfile() dir.create(dir) on.exit(unlink(dir), add = TRUE) pkgdir <- safe_build_package(pkgdir, build_opts, build_manual, build_vignettes, dir, quiet) } safe_install_packages( pkgdir, repos = NULL, quiet = quiet, type = "source", ... ) invisible(pkg_name) } safe_install_packages <- function(...) { lib <- paste(.libPaths(), collapse = .Platform$path.sep) if (!is_standalone() && has_package("crancache") && has_package("callr")) { i.p <- "crancache" %::% "install_packages" } else { i.p <- utils::install.packages } with_options(list(install.lock = getOption("install.lock", TRUE)), { with_envvar( c(R_LIBS = lib, R_LIBS_USER = lib, R_LIBS_SITE = lib, RGL_USE_NULL = "TRUE"), # Set options(warn = 2) for this process and child processes, so that # warnings from `install.packages()` are converted to errors. if (should_error_for_warnings()) { with_options(list(warn = 2), with_rprofile_user("options(warn = 2)", i.p(...) ) ) } else { i.p(...) 
} ) }) } normalize_build_opts <- function(build_opts, build_manual, build_vignettes) { if (!isTRUE(build_manual)) { build_opts <- union(build_opts, "--no-manual") } else { build_opts <- setdiff(build_opts, "--no-manual") } if (!isTRUE(build_vignettes)) { build_opts <- union(build_opts, "--no-build-vignettes") } else { build_opts <- setdiff(build_opts, "--no-build-vignettes") } build_opts } safe_build_package <- function(pkgdir, build_opts, build_manual, build_vignettes, dest_path, quiet, use_pkgbuild = !is_standalone() && pkg_installed("pkgbuild")) { build_opts <- normalize_build_opts(build_opts, build_manual, build_vignettes) if (use_pkgbuild) { vignettes <- TRUE manual <- FALSE has_no_vignettes <- grepl("--no-build-vignettes", build_opts) if (any(has_no_vignettes)) { vignettes <- FALSE } has_no_manual <- grepl("--no-manual", build_opts) if (!any(has_no_manual)) { manual <- TRUE } build_opts <- build_opts[!(has_no_vignettes | has_no_manual)] pkgbuild::build(pkgdir, dest_path = dest_path, binary = FALSE, vignettes = vignettes, manual = manual, args = build_opts, quiet = quiet) } else { # No pkgbuild, so we need to call R CMD build ourselves lib <- paste(.libPaths(), collapse = .Platform$path.sep) env <- c(R_LIBS = lib, R_LIBS_USER = lib, R_LIBS_SITE = lib, R_PROFILE_USER = tempfile()) pkgdir <- normalizePath(pkgdir) message("Running `R CMD build`...") in_dir(dest_path, { with_envvar(env, { output <- rcmd("build", c(build_opts, shQuote(pkgdir)), quiet = quiet, fail_on_status = FALSE) }) }) if (output$status != 0) { cat("STDOUT:\n") cat(output$stdout, sep = "\n") cat("STDERR:\n") cat(output$stderr, sep = "\n") msg_for_long_paths(output) stop(sprintf("Failed to `R CMD build` package, try `build = FALSE`."), call. = FALSE) } building_regex <- paste0( "^[*] building[^[:alnum:]]+", # prefix, "* building '" "([-[:alnum:]_.]+)", # package file name, e.g. 
xy_1.0-2.tar.gz "[^[:alnum:]]+$" # trailing quote ) pkgfile <- sub(building_regex, "\\1", output$stdout[length(output$stdout)]) file.path(dest_path, pkgfile) } } msg_for_long_paths <- function(output) { if (sys_type() == "windows" && (r_error_matches("over-long path", output$stderr) || r_error_matches("over-long path length", output$stderr))) { message( "\nIt seems that this package contains files with very long paths.\n", "This is not supported on most Windows versions. Please contact the\n", "package authors and tell them about this. See this GitHub issue\n", "for more details: https://github.com/r-lib/remotes/issues/84\n") } } r_error_matches <- function(msg, str) { any(grepl(msg, str)) || any(grepl(gettext(msg, domain = "R"), str)) } #' Install package dependencies if needed. #' #' @inheritParams package_deps #' @param ... additional arguments passed to [utils::install.packages()]. #' @param build If `TRUE` build the package before installing. #' @param build_opts Options to pass to `R CMD build`, only used when `build` #' @param build_manual If `FALSE`, don't build PDF manual ('--no-manual'). #' @param build_vignettes If `FALSE`, don't build package vignettes ('--no-build-vignettes'). #' is `TRUE`. #' @export #' @examples #' \dontrun{install_deps(".")} install_deps <- function(pkgdir = ".", dependencies = NA, repos = getOption("repos"), type = getOption("pkgType"), upgrade = c("default", "ask", "always", "never"), quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, ...) { packages <- dev_package_deps( pkgdir, repos = repos, dependencies = dependencies, type = type ) dep_deps <- if (isTRUE(dependencies)) NA else dependencies update( packages, dependencies = dep_deps, quiet = quiet, upgrade = upgrade, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, type = type, ... 
) } should_error_for_warnings <- function() { force_suggests <- Sys.getenv("_R_CHECK_FORCE_SUGGESTS_", "true") no_errors <- Sys.getenv("R_REMOTES_NO_ERRORS_FROM_WARNINGS", !config_val_to_logical(force_suggests)) !config_val_to_logical(no_errors) } remotes/R/install-gitlab.R0000644000176200001440000001333413621300305015157 0ustar liggesusers#' Install a package from GitLab #' #' This function is vectorised on `repo` so you can install multiple #' packages in a single command. Like other remotes the repository will skip #' installation if `force == FALSE` (the default) and the remote state has #' not changed since the previous installation. #' #' @inheritParams install_github #' @param repo Repository address in the format #' `username/repo[@@ref]`. #' @param host GitLab API host to use. Override with your GitLab enterprise #' hostname, for example, `"gitlab.hostname.com"`. #' @param auth_token To install from a private repo, generate a personal access #' token (PAT) in \url{https://gitlab.com/profile/personal_access_tokens} and #' supply to this argument. This is safer than using a password because you #' can easily delete a PAT without affecting any others. Defaults to the #' GITLAB_PAT environment variable. #' @inheritParams install_github #' @export #' @family package installation #' @examples #' \dontrun{ #' install_gitlab("jimhester/covr") #' } install_gitlab <- function(repo, subdir = NULL, auth_token = gitlab_pat(quiet), host = "gitlab.com", dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) 
{ remotes <- lapply(repo, gitlab_remote, subdir = subdir, auth_token = auth_token, host = host) install_remotes(remotes, auth_token = auth_token, host = host, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } gitlab_remote <- function(repo, subdir = NULL, auth_token = gitlab_pat(), sha = NULL, host = "gitlab.com", ...) { meta <- parse_git_repo(repo) meta$ref <- meta$ref %||% "master" remote("gitlab", host = host, repo = paste(c(meta$repo, meta$subdir), collapse = "/"), subdir = subdir, username = meta$username, ref = meta$ref, sha = sha, auth_token = auth_token ) } #' @export remote_download.gitlab_remote <- function(x, quiet = FALSE) { dest <- tempfile(fileext = paste0(".tar.gz")) project_id <- gitlab_project_id(x$username, x$repo, x$ref, x$host, x$auth_token) src_root <- build_url(x$host, "api", "v4", "projects", project_id) src <- paste0(src_root, "/repository/archive.tar.gz?sha=", utils::URLencode(x$ref, reserved = TRUE)) if (!quiet) { message("Downloading GitLab repo ", x$username, "/", x$repo, "@", x$ref, "\nfrom URL ", src) } download(dest, src, headers = c("Private-Token" = x$auth_token)) } #' @export remote_metadata.gitlab_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { if (!is.null(bundle)) { # Might be able to get from archive sha <- git_extract_sha1_tar(bundle) } else if (is_na(sha)) { sha <- NULL } list( RemoteType = "gitlab", RemoteHost = x$host, RemoteRepo = x$repo, RemoteUsername = x$username, RemoteRef = x$ref, RemoteSha = sha, RemoteSubdir = x$subdir ) } #' @export remote_package_name.gitlab_remote <- function(remote, ...) 
{ tmp <- tempfile() src_root <- build_url( remote$host, "api", "v4", "projects", utils::URLencode(paste0(remote$username, "/", remote$repo), reserved = TRUE), "repository") src <- paste0( src_root, "/files/", ifelse( is.null(remote$subdir), "DESCRIPTION", utils::URLencode(paste0(remote$subdir, "/DESCRIPTION"), reserved = TRUE)), "/raw?ref=", remote$ref) dest <- tempfile() res <- download(dest, src, headers = c("Private-Token" = remote$auth_token)) tryCatch( read_dcf(dest)$Package, error = function(e) remote$repo) } #' @export remote_sha.gitlab_remote <- function(remote, ...) { gitlab_commit(username = remote$username, repo = remote$repo, host = remote$host, ref = remote$ref, pat = remote$auth_token) } #' @export format.gitlab_remote <- function(x, ...) { "GitLab" } gitlab_commit <- function(username, repo, ref = "master", host = "gitlab.com", pat = gitlab_pat()) { url <- build_url(host, "api", "v4", "projects", utils::URLencode(paste0(username, "/", repo), reserved = TRUE), "repository", "commits", ref) tmp <- tempfile() download(tmp, url, headers = c("Private-Token" = pat)) json$parse_file(tmp)$id } #' Retrieve GitLab personal access token. 
#' #' A GitLab personal access token #' Looks in env var `GITLAB_PAT` #' #' @keywords internal #' @export gitlab_pat <- function(quiet = TRUE) { pat <- Sys.getenv("GITLAB_PAT") if (nzchar(pat)) { if (!quiet) { message("Using GitLab PAT from envvar GITLAB_PAT") } return(pat) } return(NULL) } gitlab_project_id <- function(username, repo, ref = "master", host = "gitlab.com", pat = gitlab_pat()) { url <- build_url(host, "api", "v4", "projects", utils::URLencode(paste0(username, "/", repo), reserved = TRUE), "repository", "commits", ref) tmp <- tempfile() download(tmp, url, headers = c("Private-Token" = pat)) json$parse_file(tmp)$project_id } remotes/R/system.R0000644000176200001440000000213513243326354013606 0ustar liggesusers system_check <- function(command, args = character(), quiet = TRUE, error = TRUE, path = ".") { out <- tempfile() err <- tempfile() on.exit(unlink(out), add = TRUE) on.exit(unlink(err), add = TRUE) ## We suppress warnings, they are given if the command ## exits with a non-zero status res <- in_dir( path, suppressWarnings( system2(command, args = args, stdout = out, stderr = err) ) ) res <- list( stdout = tryCatch( suppressWarnings(win2unix(read_char(out))), error = function(e) "" ), stderr = tryCatch( suppressWarnings(win2unix(read_char(err))), error = function(e) "" ), status = res ) if (error && res$status != 0) { stop("Command ", command, " failed ", res$stderr) } if (! quiet) { if (! identical(res$stdout, NA_character_)) cat(res$stdout) if (! identical(res$stderr, NA_character_)) cat(res$stderr) } res } win2unix <- function(str) { gsub("\r\n", "\n", str, fixed = TRUE) } read_char <- function(path, ...) { readChar(path, nchars = file.info(path)$size, ...) } remotes/R/install-cran.R0000644000176200001440000000352013476462151014654 0ustar liggesusers #' Attempts to install a package from CRAN. #' #' This function is vectorised on `pkgs` so you can install multiple #' packages in a single command. 
#'
#' @param pkgs Character vector of packages to install.
#' @inheritParams install_github
#' @export
#' @family package installation
#' @examples
#' \dontrun{
#' install_cran("ggplot2")
#' install_cran(c("httpuv", "shiny"))
#' }
install_cran <- function(pkgs, repos = getOption("repos"), type = getOption("pkgType"),
                         dependencies = NA,
                         upgrade = c("default", "ask", "always", "never"),
                         force = FALSE,
                         quiet = FALSE,
                         build = TRUE,
                         build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                         build_manual = FALSE, build_vignettes = FALSE,
                         ...) {
  # Wrap each package name in a cran_remote spec, then delegate to the
  # common installer.
  specs <- lapply(pkgs, cran_remote, repos = repos, type = type)
  install_remotes(specs,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Describe a CRAN package as a remote spec.
cran_remote <- function(pkg, repos, type, ...) {
  remote("cran",
    name = pkg,
    repos = repos,
    pkg_type = type
  )
}

#' @export
remote_package_name.cran_remote <- function(remote, ...) {
  remote$name
}

#' @export
remote_sha.cran_remote <- function(remote, ...) {
  # For CRAN remotes the "sha" is simply the version currently available
  # in the configured repositories.
  pkgs <- available_packages(remote$repos, remote$pkg_type)
  idx <- match(remote$name, rownames(pkgs))
  trim_ws(unname(pkgs[, "Version"][idx]))
}

#' @export
format.cran_remote <- function(x, ...) {
  "CRAN"
}
remotes/R/download.R0000644000176200001440000001634713504226161014076 0ustar liggesusers
#' Download a file
#'
#' Uses either the curl package for R versions older than 3.2.0,
#' otherwise a wrapper around [download.file()].
#'
#' We respect the `download.file.method` setting of the user. If it is
#' not set, then see `download_method()` for choosing a method.
#'
#' Authentication can be supplied three ways:
#' * By setting `auth_token`. This will append an HTTP `Authorization`
#' header: `Authorization: token {auth_token}`.
#' * By setting `basic_auth` to a list with elements `user` and `password`.
#' This will append a proper `Authorization: Basic {encoded_password}` #' HTTP header. #' * By specifying the proper `headers` directly. #' #' If both `auth_token` and `basic_auth` are specified, that's an error. #' `auth_token` and `basic_auth` are _appended_ to `headers`, so they #' take precedence over an `Authorization` header that is specified #' directly in `headers`. #' #' @param path Path to download to. `dirname(path)` must exist. #' @param url URL. #' @param auth_token Token for token-based authentication or `NULL`. #' @param basic_auth List with `user` and `password` for basic HTTP #' authentication, or `NULL`. #' @param quiet Passed to [download.file()] or [curl::curl_download()]. #' @param headers Named character vector of HTTP headers to use. #' @return `path`, if the download was successful. #' #' @keywords internal #' @importFrom utils compareVersion download <- function(path, url, auth_token = NULL, basic_auth = NULL, quiet = TRUE, headers = NULL) { if (!is.null(basic_auth) && !is.null(auth_token)) { stop("Cannot use both Basic and Token authentication at the same time") } if (!is.null(basic_auth)) { userpass <- paste0(basic_auth$user, ":", basic_auth$password) auth <- paste("Basic", base64_encode(charToRaw(userpass))) headers <- c(headers, Authorization = auth) } if (!is.null(auth_token)) { headers <- c(headers, Authorization = paste("token", auth_token)) } if (getRversion() < "3.2.0") { curl_download(url, path, quiet, headers) } else { base_download(url, path, quiet, headers) } path } base_download <- function(url, path, quiet, headers) { method <- download_method() status <- if (method == "wget") { base_download_wget(url, path, quiet, headers) } else if (method =="curl") { base_download_curl(url, path, quiet, headers) } else if (getRversion() < "3.6.0") { base_download_noheaders(url, path, quiet, headers, method) } else { base_download_headers(url, path, quiet, headers, method) } if (status != 0) stop("Cannot download file from ", url, call. 
= FALSE) path } base_download_wget <- function(url, path, quiet, headers) { extra <- getOption("download.file.extra") if (length(headers)) { qh <- shQuote(paste0(names(headers), ": ", headers)) extra <- c(extra, paste0("--header=", qh)) } with_options( list(download.file.extra = extra), suppressWarnings( utils::download.file( url, path, method = "wget", quiet = quiet, mode = "wb", extra = extra ) ) ) } base_download_curl <- function(url, path, quiet, headers) { extra <- getOption("download.file.extra") # always add `-L`, so that curl follows redirects. GitHub in particular uses # 302 redirects extensively, so without -L these requests fail. extra <- c(extra, "-L") if (length(headers)) { qh <- shQuote(paste0(names(headers), ": ", headers)) extra <- c(extra, paste("-H", qh)) } with_options( list(download.file.extra = extra), suppressWarnings( utils::download.file( url, path, method = "curl", quiet = quiet, mode = "wb", extra = extra ) ) ) } base_download_noheaders <- function(url, path, quiet, headers, method) { if (length(headers)) { if (method == "wininet" && getRversion() < "3.6.0") { warning(paste( "R (< 3.6.0) cannot send HTTP headers with the `wininet` download method.", "This download will likely fail. Please choose a different download", "method, via the `download.file.method` option. The `libcurl` method is", "best, if available, and the `wget` and `curl` methods work as well,", "if the corresponding external tool is available. 
See `?download.file`")) } get("unlockBinding", baseenv())("makeUserAgent", asNamespace("utils")) orig <- get("makeUserAgent", envir = asNamespace("utils")) on.exit({ assign("makeUserAgent", orig, envir = asNamespace("utils")) lockBinding("makeUserAgent", asNamespace("utils")) }, add = TRUE) ua <- orig(FALSE) flathead <- paste0(names(headers), ": ", headers, collapse = "\r\n") agent <- paste0(ua, "\r\n", flathead) assign( "makeUserAgent", envir = asNamespace("utils"), function(format = TRUE) { if (format) { paste0("User-Agent: ", agent, "\r\n") } else { agent } }) } suppressWarnings( utils::download.file( url, path, method = method, quiet = quiet, mode = "wb" ) ) } base_download_headers <- function(url, path, quiet, headers, method) { suppressWarnings( utils::download.file( url, path, method = method, quiet = quiet, mode = "wb", headers = headers ) ) } has_curl <- function() isTRUE(unname(capabilities("libcurl"))) download_method <- function() { user_option <- getOption("download.file.method") if (!is.null(user_option)) { ## The user wants what the user wants user_option } else if (has_curl()) { ## If we have libcurl, it is usually the best option "libcurl" } else if (compareVersion(get_r_version(), "3.3") == -1 && os_type() == "windows") { ## Before 3.3 we select wininet on Windows "wininet" } else { ## Otherwise this is probably hopeless, but let R select, and ## try something "auto" } } curl_download <- function(url, path, quiet, headers) { if (!pkg_installed("curl")) { stop("The 'curl' package is required if R is older than 3.2.0") } handle <- curl::new_handle() if (!is.null(headers)) curl::handle_setheaders(handle, .list = headers) curl::curl_download(url, path, quiet = quiet, mode = "wb", handle = handle) } true_download_method <- function(x) { if (identical(x, "auto")) { auto_download_method() } else { x } } auto_download_method <- function() { if (isTRUE(capabilities("libcurl"))) { "libcurl" } else if (isTRUE(capabilities("http/ftp"))) { "internal" } else if 
(nzchar(Sys.which("wget"))) { "wget" } else if (nzchar(Sys.which("curl"))) { "curl" } else { "" } } download_method_secure <- function() { method <- true_download_method(download_method()) if (method %in% c("wininet", "libcurl", "wget", "curl")) { # known good methods TRUE } else if (identical(method, "internal")) { # only done before R 3.3 if (utils::compareVersion(get_r_version(), "3.3") == -1) { # if internal then see if were using windows internal with inet2 identical(Sys.info()[["sysname"]], "Windows") && utils::setInternet2(NA) } else { FALSE } } else { # method with unknown properties (e.g. "lynx") or unresolved auto FALSE } } remotes/R/bioc.R0000644000176200001440000000313613577155371013211 0ustar liggesusers #' @export #' @rdname bioc_install_repos #' @keywords internal #' @examples #' bioc_version() #' bioc_version("3.4") bioc_version <- function(r_ver = getRversion()) { bioconductor$get_bioc_version(r_ver) } #' Tools for Bioconductor repositories #' #' `bioc_version()` returns the Bioconductor version for the current or the #' specified R version. #' #' `bioc_install_repos()` deduces the URLs of the Bioconductor repositories. #' #' @details #' Both functions observe the `R_BIOC_VERSION` environment variable, which #' can be set to force a Bioconductor version. If this is set, then the #' `r_ver` and `bioc_ver` arguments are ignored. #' #' `bioc_install_repos()` observes the `R_BIOC_MIRROR` environment variable #' and also the `BioC_mirror` option, which can be set to the desired #' Bioconductor mirror. The option takes precedence if both are set. Its #' default value is `https://bioconductor.org`. #' #' @return #' `bioc_version()` returns a Bioconductor version, a `package_version` #' object. #' #' `bioc_install_repos()` returns a named character vector of the URLs of #' the Bioconductor repositories, appropriate for the current or the #' specified R version. #' #' @param r_ver R version to use. 
For `bioc_install_repos()` it is #' ignored if `bioc_ver` is specified. #' @param bioc_ver Bioconductor version to use. Defaults to the default one #' corresponding to `r_ver`. #' #' @export #' @keywords internal #' @examples #' bioc_install_repos() bioc_install_repos <- function(r_ver = getRversion(), bioc_ver = bioc_version(r_ver)) { bioconductor$get_repos(bioc_ver) } remotes/R/install-git.R0000644000176200001440000001666013503144672014521 0ustar liggesusers #' Install a package from a git repository #' #' It is vectorised so you can install multiple packages with #' a single command. You do not need to have the `git2r` package, #' or an external git client installed. #' #' If you need to set git credentials for use in the `Remotes` field you can do #' so by placing the credentials in the `remotes.git_credentials` global #' option. #' #' @param url Location of package. The url should point to a public or #' private repository. #' @param ref Name of branch, tag or SHA reference to use, if not HEAD. #' @param branch Deprecated, synonym for ref. #' @param subdir A sub-directory within a git repository that may #' contain the package we are interested in installing. #' @param credentials A git2r credentials object passed through to clone. #' Supplying this argument implies using `git2r` with `git`. #' @param git Whether to use the `git2r` package, or an external #' git client via system. Default is `git2r` if it is installed, #' otherwise an external git installation. #' @param ... Other arguments passed on to [utils::install.packages()]. 
#' @inheritParams install_github #' @family package installation #' @export #' @examples #' \dontrun{ #' install_git("git://github.com/hadley/stringr.git") #' install_git("git://github.com/hadley/stringr.git", ref = "stringr-0.2") #'} install_git <- function(url, subdir = NULL, ref = NULL, branch = NULL, credentials = git_credentials(), git = c("auto", "git2r", "external"), dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { if (!missing(branch)) { warning("`branch` is deprecated, please use `ref`") ref <- branch } remotes <- lapply(url, git_remote, subdir = subdir, ref = ref, credentials = credentials, git = match.arg(git)) install_remotes(remotes, credentials = credentials, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } git_remote <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials(), git = c("auto", "git2r", "external"), ...) { git <- match.arg(git) if (git == "auto") { git <- if (!is_standalone() && pkg_installed("git2r")) "git2r" else "external" } if (!is.null(credentials) && git != "git2r") { stop("`credentials` can only be used with `git = \"git2r\"`", call. 
= FALSE) } list(git2r = git_remote_git2r, external = git_remote_xgit)[[git]](url, subdir, ref, credentials) } git_remote_git2r <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials()) { remote("git2r", url = url, subdir = subdir, ref = ref, credentials = credentials ) } git_remote_xgit <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials()) { remote("xgit", url = url, subdir = subdir, ref = ref ) } #' @export remote_download.git2r_remote <- function(x, quiet = FALSE) { if (!quiet) { message("Downloading git repo ", x$url) } bundle <- tempfile() git2r::clone(x$url, bundle, credentials = x$credentials, progress = FALSE) if (!is.null(x$ref)) { r <- git2r::repository(bundle) git2r::checkout(r, x$ref) } bundle } #' @export remote_metadata.git2r_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { if (!is.null(bundle)) { r <- git2r::repository(bundle) sha <- git2r::commits(r)[[1]]$sha } else { sha <- NULL } list( RemoteType = "git2r", RemoteUrl = x$url, RemoteSubdir = x$subdir, RemoteRef = x$ref, RemoteSha = sha ) } #' @export remote_package_name.git2r_remote <- function(remote, ...) { tmp <- tempfile() on.exit(unlink(tmp)) description_path <- paste0(collapse = "/", c(remote$subdir, "DESCRIPTION")) # Try using git archive --remote to retrieve the DESCRIPTION, if the protocol # or server doesn't support that return NA res <- try(silent = TRUE, system_check(git_path(), args = c("archive", "-o", tmp, "--remote", remote$url, if (is.null(remote$ref)) "HEAD" else remote$ref, description_path), quiet = TRUE)) if (inherits(res, "try-error")) { return(NA_character_) } # git archive returns a tar file, so extract it to tempdir and read the DCF utils::untar(tmp, files = description_path, exdir = tempdir()) read_dcf(file.path(tempdir(), description_path))$Package } #' @export remote_sha.git2r_remote <- function(remote, ...) 
{ tryCatch({ # set suppressWarnings in git2r 0.23.0+ res <- suppressWarnings(git2r::remote_ls(remote$url, credentials=remote$credentials)) # This needs to be master, not HEAD because no ref is called HEAD ref <- remote$ref %||% "master" found <- grep(pattern = paste0("/", ref), x = names(res)) # If none found, it is either a SHA, so return the pinned sha or NA if (length(found) == 0) { return(remote$ref %||% NA_character_) } unname(res[found[1]]) }, error = function(e) { warning(e); NA_character_}) } #' @export format.xgit_remote <- function(x, ...) { "Git" } #' @export format.git2r_remote <- function(x, ...) { "Git" } #' @export remote_download.xgit_remote <- function(x, quiet = FALSE) { if (!quiet) { message("Downloading git repo ", x$url) } bundle <- tempfile() args <- c('clone', '--depth', '1', '--no-hardlinks') args <- c(args, x$args, x$url, bundle) git(paste0(args, collapse = " "), quiet = quiet) if (!is.null(x$ref)) { git(paste0(c("fetch", "origin", x$ref), collapse = " "), quiet = quiet, path = bundle) git(paste0(c("checkout", "FETCH_HEAD"), collapse = " "), quiet = quiet, path = bundle) } bundle } #' @export remote_metadata.xgit_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { if (is_na(sha)) { sha <- NULL } list( RemoteType = "xgit", RemoteUrl = x$url, RemoteSubdir = x$subdir, RemoteRef = x$ref, RemoteSha = sha, RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " ") ) } #' @importFrom utils read.delim #' @export remote_package_name.xgit_remote <- remote_package_name.git2r_remote #' @export remote_sha.xgit_remote <- function(remote, ...) 
{ url <- remote$url ref <- remote$ref refs <- git(paste("ls-remote", url, ref)) # If none found, it is either a SHA, so return the pinned SHA or NA if (length(refs) == 0) { return(remote$ref %||% NA_character_) } refs_df <- read.delim(text = refs, stringsAsFactors = FALSE, sep = "\t", header = FALSE) names(refs_df) <- c("sha", "ref") refs_df$sha[[1]] } #' Specify git credentials to use #' #' The global option `remotes.git_credentials` is used to set the git #' credentials. #' @export #' @keywords internal git_credentials <- function() { getOption("remotes.git_credentials", NULL) } remotes/R/install-remote.R0000644000176200001440000002077213621300305015214 0ustar liggesusers#' Install a remote package. #' #' This: #' \enumerate{ #' \item downloads source bundle #' \item decompresses & checks that it's a package #' \item adds metadata to DESCRIPTION #' \item calls install #' } #' @noRd install_remote <- function(remote, dependencies, upgrade, force, quiet, build, build_opts, build_manual, build_vignettes, repos, type, ...) { stopifnot(is.remote(remote)) package_name <- remote_package_name(remote) local_sha <- local_sha(package_name) remote_sha <- remote_sha(remote, local_sha) if (!isTRUE(force) && !different_sha(remote_sha = remote_sha, local_sha = local_sha)) { if (!quiet) { message( "Skipping install of '", package_name, "' from a ", sub("_remote", "", class(remote)[1L]), " remote,", " the SHA1 (", substr(remote_sha, 1L, 8L), ") has not changed since last install.\n", " Use `force = TRUE` to force installation") } return(invisible(package_name)) } if (inherits(remote, "cran_remote")) { install_packages( package_name, repos = remote$repos, type = remote$pkg_type, dependencies = dependencies, quiet = quiet, ...) 
return(invisible(package_name)) } res <- try(bundle <- remote_download(remote, quiet = quiet), silent = quiet) if (inherits(res, "try-error")) { return(NA_character_) } on.exit(unlink(bundle), add = TRUE) source <- source_pkg(bundle, subdir = remote$subdir) on.exit(unlink(source, recursive = TRUE), add = TRUE) update_submodules(source, remote$subdir, quiet) add_metadata(source, remote_metadata(remote, bundle, source, remote_sha)) # Because we've modified DESCRIPTION, its original MD5 value is wrong clear_description_md5(source) install(source, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } install_remotes <- function(remotes, ...) { res <- character(length(remotes)) for (i in seq_along(remotes)) { tryCatch( res[[i]] <- install_remote(remotes[[i]], ...), error = function(e) { stop(remote_install_error(remotes[[i]], e)) }) } invisible(res) } remote_install_error <- function(remote, error) { msg <- sprintf( "Failed to install '%s' from %s:\n %s", remote_name_or_unknown(remote), format(remote), conditionMessage(error) ) structure(list(message = msg, call = NULL, error = error, remote = remote), class = c("install_error", "error", "condition")) } remote_name_or_unknown <- function(remote) { res <- tryCatch( res <- remote_package_name(remote), error = function(e) NA_character_) if (is.na(res)) { return("unknown package") } res } # Add metadata add_metadata <- function(pkg_path, meta) { # During installation, the DESCRIPTION file is read and an package.rds file # created with most of the information from the DESCRIPTION file. 
Functions # that read package metadata may use either the DESCRIPTION file or the # package.rds file, therefore we attempt to modify both of them source_desc <- file.path(pkg_path, "DESCRIPTION") binary_desc <- file.path(pkg_path, "Meta", "package.rds") if (file.exists(source_desc)) { desc <- read_dcf(source_desc) desc <- utils::modifyList(desc, meta) write_dcf(source_desc, desc) } if (file.exists(binary_desc)) { pkg_desc <- base::readRDS(binary_desc) desc <- as.list(pkg_desc$DESCRIPTION) desc <- utils::modifyList(desc, meta) pkg_desc$DESCRIPTION <- stats::setNames(as.character(desc), names(desc)) base::saveRDS(pkg_desc, binary_desc) } } # Modify the MD5 file - remove the line for DESCRIPTION clear_description_md5 <- function(pkg_path) { path <- file.path(pkg_path, "MD5") if (file.exists(path)) { text <- readLines(path) text <- text[!grepl(".*\\*DESCRIPTION$", text)] writeLines(text, path) } } remote <- function(type, ...) { structure(list(...), class = c(paste0(type, "_remote"), "remote")) } is.remote <- function(x) inherits(x, "remote") remote_download <- function(x, quiet = FALSE) UseMethod("remote_download") remote_metadata <- function(x, bundle = NULL, source = NULL, sha = NULL) UseMethod("remote_metadata") remote_package_name <- function(remote, ...) UseMethod("remote_package_name") remote_sha <- function(remote, ...) UseMethod("remote_sha") remote_package_name.default <- function(remote, ...) remote$repo remote_sha.default <- function(remote, ...) NA_character_ different_sha <- function(remote_sha, local_sha) { same <- remote_sha == local_sha same <- isTRUE(same) && !is.na(same) !same } local_sha <- function(name) { package2remote(name)$sha %||% NA_character_ } # Convert an installed package to its equivalent remote. This constructs the # remote from metadata stored in the package's DESCRIPTION file; the metadata # is added to the package when it is installed by remotes. 
If the package is # installed some other way, such as by `install.packages()` there will be no # meta-data, so there we construct a generic CRAN remote. package2remote <- function(name, lib = .libPaths(), repos = getOption("repos"), type = getOption("pkgType")) { x <- tryCatch(utils::packageDescription(name, lib.loc = lib), error = function(e) NA, warning = function(e) NA) # will be NA if not installed if (identical(x, NA)) { return(remote("cran", name = name, repos = repos, pkg_type = type, sha = NA_character_)) } if (is.null(x$RemoteType) || x$RemoteType == "cran") { # Packages installed with install.packages() or locally without remotes return(remote("cran", name = x$Package, repos = repos, pkg_type = type, sha = x$Version)) } switch(x$RemoteType, standard = remote("cran", name = x$Package, repos = x$RemoteRepos %||% repos, pkg_type = x$RemotePkgType %||% type, sha = x$RemoteSha), github = remote("github", host = x$RemoteHost, package = x$RemotePackage, repo = x$RemoteRepo, subdir = x$RemoteSubdir, username = x$RemoteUsername, ref = x$RemoteRef, sha = x$RemoteSha, auth_token = github_pat()), gitlab = remote("gitlab", host = x$RemoteHost, repo = x$RemoteRepo, subdir = x$RemoteSubdir, username = x$RemoteUsername, ref = x$RemoteRef, sha = x$RemoteSha, auth_token = gitlab_pat()), xgit = remote("xgit", url = trim_ws(x$RemoteUrl), ref = x$RemoteRef %||% x$RemoteBranch, sha = x$RemoteSha, subdir = x$RemoteSubdir, args = x$RemoteArgs), git2r = remote("git2r", url = trim_ws(x$RemoteUrl), ref = x$RemoteRef %||% x$RemoteBranch, sha = x$RemoteSha, subdir = x$RemoteSubdir, credentials = git_credentials()), bitbucket = remote("bitbucket", host = x$RemoteHost, repo = x$RemoteRepo, username = x$RemoteUsername, ref = x$RemoteRef, sha = x$RemoteSha, subdir = x$RemoteSubdir, auth_user = bitbucket_user(), password = bitbucket_password()), svn = remote("svn", url = trim_ws(x$RemoteUrl), svn_subdir = x$RemoteSubdir, revision = x$RemoteSha, args = x$RemoteArgs), local = 
remote("local", path = trim_ws(x$RemoteUrl), subdir = x$RemoteSubdir, sha = { # Packages installed locally might have RemoteSha == NA_character_ x$RemoteSha %||% x$Version }), url = remote("url", url = trim_ws(x$RemoteUrl), subdir = x$RemoteSubdir, config = x$RemoteConfig, pkg_type = x$RemotePkgType %||% type), bioc_git2r = remote("bioc_git2r", mirror = x$RemoteMirror, repo = x$RemoteRepo, release = x$RemoteRelease, sha = x$RemoteSha, branch = x$RemoteBranch), bioc_xgit = remote("bioc_xgit", mirror = x$RemoteMirror, repo = x$RemoteRepo, release = x$RemoteRelease, sha = x$RemoteSha, branch = x$RemoteBranch), stop(sprintf("can't convert package %s with RemoteType '%s' to remote", name, x$RemoteType)) ) } #' @export format.remotes <- function(x, ...) { vapply(x, format, character(1)) } remotes/R/github.R0000644000176200001440000001344513621300305013536 0ustar liggesusers github_GET <- function(path, ..., host = "api.github.com", pat = github_pat(), use_curl = !is_standalone() && pkg_installed("curl")) { url <- build_url(host, path) if (isTRUE(use_curl)) { h <- curl::new_handle() headers <- c( if (!is.null(pat)) { c("Authorization" = paste0("token ", pat)) } ) curl::handle_setheaders(h, .list = headers) res <- curl::curl_fetch_memory(url, handle = h) if (res$status_code >= 300) { stop(github_error(res)) } json$parse(rawToChar(res$content)) } else { tmp <- tempfile() download(tmp, url, auth_token = pat) json$parse_file(tmp) } } github_commit <- function(username, repo, ref = "master", host = "api.github.com", pat = github_pat(), use_curl = !is_standalone() && pkg_installed("curl"), current_sha = NULL) { url <- build_url(host, "repos", username, repo, "commits", utils::URLencode(ref, reserved = TRUE)) if (isTRUE(use_curl)) { h <- curl::new_handle() headers <- c( "Accept" = "application/vnd.github.v3.sha", if (!is.null(pat)) { c("Authorization" = paste0("token ", pat)) } ) if (!is.null(current_sha)) { headers <- c(headers, "If-None-Match" = paste0('"', current_sha, '"')) 
} curl::handle_setheaders(h, .list = headers) res <- curl::curl_fetch_memory(url, handle = h) if (res$status_code == 304) { return(current_sha) } if (res$status_code >= 300) { stop(github_error(res)) } rawToChar(res$content) } else { tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) download(tmp, url, auth_token = pat) get_json_sha(paste0(readLines(tmp, warn = FALSE), collapse = "\n")) } } #' Retrieve Github personal access token. #' #' A github personal access token #' Looks in env var `GITHUB_PAT` #' #' @keywords internal #' @noRd github_pat <- function(quiet = TRUE) { pat <- Sys.getenv("GITHUB_PAT") if (nzchar(pat)) { if (!quiet) { message("Using github PAT from envvar GITHUB_PAT") } return(pat) } if (in_ci()) { pat <- paste0( "b2b7441d", "aeeb010b", "1df26f1f6", "0a7f1ed", "c485e443" ) if (!quiet) { message("Using bundled GitHub PAT. Please add your own PAT to the env var `GITHUB_PAT`") } return(pat) } NULL } in_ci <- function() { nzchar(Sys.getenv("CI")) } in_travis <- function() { identical(Sys.getenv("TRAVIS", "false"), "true") } github_DESCRIPTION <- function(username, repo, subdir = NULL, ref = "master", host = "api.github.com", ..., use_curl = !is_standalone() && pkg_installed("curl"), pat = github_pat()) { if (!is.null(subdir)) { subdir <- utils::URLencode(subdir) } url <- build_url(host, "repos", username, repo, "contents", subdir, "DESCRIPTION") url <- paste0(url, "?ref=", utils::URLencode(ref)) if (isTRUE(use_curl)) { h <- curl::new_handle() headers <- c( "Accept" = "application/vnd.github.v3.raw", if (!is.null(pat)) { c("Authorization" = paste0("token ", pat)) } ) curl::handle_setheaders(h, .list = headers) res <- curl::curl_fetch_memory(url, handle = h) if (res$status_code >= 300) { stop(github_error(res)) } rawToChar(res$content) } else { tmp <- tempfile() on.exit(unlink(tmp), add = TRUE) tmp <- tempfile() download(tmp, url, auth_token = pat) base64_decode(gsub("\\\\n", "", json$parse_file(tmp)$content)) } } github_error <- function(res) { 
res_headers <- curl::parse_headers_list(res$headers) ratelimit_limit <- res_headers$`x-ratelimit-limit` %||% NA_character_ ratelimit_remaining <- res_headers$`x-ratelimit-remaining` %||% NA_character_ ratelimit_reset <- .POSIXct(res_headers$`x-ratelimit-reset` %||% NA_character_, tz = "UTC") error_details <- json$parse(rawToChar(res$content))$message guidance <- "" if (identical(as.integer(ratelimit_remaining), 0L)) { guidance <- sprintf( "To increase your GitHub API rate limit - Use `usethis::browse_github_pat()` to create a Personal Access Token. - %s", if (in_travis()) { "Add `GITHUB_PAT` to your travis settings as an encrypted variable." } else { "Use `usethis::edit_r_environ()` and add the token as `GITHUB_PAT`." } ) } else if (identical(as.integer(res$status_code), 404L)) { repo_information <- re_match(res$url, "(repos)/(?P[^/]+)/(?P[^/]++)/") if(!is.na(repo_information$owner) && !is.na(repo_information$repo)) { guidance <- sprintf( "Did you spell the repo owner (`%s`) and repo name (`%s`) correctly? - If spelling is correct, check that you have the required permissions to access the repo.", repo_information$owner, repo_information$repo ) } else { guidance <- "Did you spell the repo owner and repo name correctly? - If spelling is correct, check that you have the required permissions to access the repo." } } if(identical(as.integer(res$status_code), 404L)) { msg <- sprintf( "HTTP error %s. %s %s", res$status_code, error_details, guidance ) } else if (!is.na(ratelimit_limit)) { msg <- sprintf( "HTTP error %s. %s Rate limit remaining: %s/%s Rate limit reset at: %s %s", res$status_code, error_details, ratelimit_remaining, ratelimit_limit, format(ratelimit_reset, usetz = TRUE), guidance ) } else { msg <- sprintf( "HTTP error %s. 
%s", res$status_code, error_details ) } status_type <- (as.integer(res$status_code) %/% 100) * 100 structure(list(message = msg, call = NULL), class = c(paste0("http_", unique(c(res$status_code, status_type, "error"))), "error", "condition")) } #> Error: HTTP error 404. #> Not Found #> #> Rate limit remaining: 4999 #> Rate limit reset at: 2018-10-10 19:43:52 UTC remotes/R/git.R0000644000176200001440000000451613341247544013054 0ustar liggesusers # Extract the commit hash from a git archive. Git archives include the SHA1 # hash as the comment field of the tarball pax extended header # (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html) # For GitHub archives this should be the first header after the default one # (512 byte) header. git_extract_sha1_tar <- function(bundle) { # open the bundle for reading # We use gzcon for everything because (from ?gzcon) # > Reading from a connection which does not supply a ‘gzip’ magic # > header is equivalent to reading from the original connection conn <- gzcon(file(bundle, open = "rb", raw = TRUE)) on.exit(close(conn)) # The default pax header is 512 bytes long and the first pax extended header # with the comment should be 51 bytes long # `52 comment=` (11 chars) + 40 byte SHA1 hash len <- 0x200 + 0x33 res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len]) if (grepl("^52 comment=", res)) { sub("52 comment=", "", res) } else { NULL } } git <- function(args, quiet = TRUE, path = ".") { full <- paste0(shQuote(check_git_path()), " ", paste(args, collapse = "")) if (!quiet) { message(full) } result <- in_dir(path, system(full, intern = TRUE, ignore.stderr = quiet)) status <- attr(result, "status") %||% 0 if (!identical(as.character(status), "0")) { stop("Command failed (", status, ")", call. = FALSE) } result } # Retrieve the current running path of the git binary. # @param git_binary_name The name of the binary depending on the OS. 
git_path <- function(git_binary_name = NULL) { # Use user supplied path if (!is.null(git_binary_name)) { if (!file.exists(git_binary_name)) { stop("Path ", git_binary_name, " does not exist", .call = FALSE) } return(git_binary_name) } # Look on path git_path <- Sys.which("git")[[1]] if (git_path != "") return(git_path) # On Windows, look in common locations if (os_type() == "windows") { look_in <- c( "C:/Program Files/Git/bin/git.exe", "C:/Program Files (x86)/Git/bin/git.exe" ) found <- file.exists(look_in) if (any(found)) return(look_in[found][1]) } NULL } check_git_path <- function(git_binary_name = NULL) { path <- git_path(git_binary_name) if (is.null(path)) { stop("Git does not seem to be installed on your system.", call. = FALSE) } path } remotes/R/cran.R0000644000176200001440000000213013476462061013204 0ustar liggesuserscache <- new.env(parent = emptyenv()) #' @rdname available_packages #' @export available_packages_set <- function(repos, type, db) { signature <- rawToChar(serialize(list(repos, type), NULL, ascii = TRUE)) if (is.null(cache[[signature]])) { cache[[signature]] <- db } cache[[signature]] } #' @rdname available_packages #' @export available_packages_reset <- function() { rm(list = ls(envir = cache), envir = cache) } #' Simpler available.packages #' #' This is mostly equivalent to [utils::available.packages()] however it also #' caches the full result. Additionally the cache can be assigned explicitly with #' [available_packages_set()] and reset (cleared) with [available_packages_reset()]. #' #' @inheritParams utils::available.packages #' @keywords internal #' @seealso [utils::available.packages()] for full documentation on the output format. 
#' @export available_packages <- function(repos = getOption("repos"), type = getOption("pkgType")) { available_packages_set( repos, type, suppressWarnings(utils::available.packages(utils::contrib.url(repos, type), type = type)) ) } remotes/R/install-bitbucket.R0000644000176200001440000001544513577155371015723 0ustar liggesusers #' Install a package directly from Bitbucket #' #' This function is vectorised so you can install multiple packages in #' a single command. #' #' @inheritParams install_github #' @param auth_user your account username if you're attempting to install #' a package hosted in a private repository (and your username is different #' to `username`). Defaults to the `BITBUCKET_USER` environment #' variable. #' @param password your password. Defaults to the `BITBUCKET_PASSWORD` #' environment variable. See details for further information on setting #' up a password. #' @param ref Desired git reference; could be a commit, tag, or branch name. #' Defaults to master. #' @seealso Bitbucket API docs: #' #' #' @details To install from a private repo, or more generally, access the #' Bitbucket API with your own credentials, you will need to get an access #' token. You can create an access token following the instructions found in #' the #' \href{https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html}{Bitbucket #' App Passwords documentation}. The App Password requires read-only access to #' your repositories and pull requests. Then store your password in the #' environment variable `BITBUCKET_PASSWORD` (e.g. `evelynwaugh:swordofhonour`) #' #' Note that on Windows, authentication requires the "libcurl" download #' method. You can set the default download method via the #' `download.file.method` option: #' ``` #' options(download.file.method = "libcurl") #' ``` #' In particular, if unset, RStudio sets the download method to "wininet". #' To override this, you might want to set it to "libcurl" in your #' R profile, see [base::Startup]. 
The caveat of the "libcurl" method is #' that it does _not_ set the system proxies automatically, see #' "Setting Proxies" in [utils::download.file()]. #' #' @inheritParams install_github #' @family package installation #' @export #' @examples #' \dontrun{ #' install_bitbucket("sulab/mygene.r@@default") #' install_bitbucket("djnavarro/lsr") #' } install_bitbucket <- function(repo, ref = "master", subdir = NULL, auth_user = bitbucket_user(), password = bitbucket_password(), host = "api.bitbucket.org/2.0", dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { remotes <- lapply(repo, bitbucket_remote, ref = ref, subdir = subdir, auth_user = auth_user, password = password, host = host) install_remotes(remotes, auth_user = auth_user, password = password, host = host, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } bitbucket_remote <- function(repo, ref = "master", subdir = NULL, auth_user = bitbucket_user(), password = bitbucket_password(), sha = NULL, host = "api.bitbucket.org/2.0", ...) 
{ meta <- parse_git_repo(repo) remote("bitbucket", repo = meta$repo, subdir = meta$subdir %||% subdir, username = meta$username, ref = meta$ref %||% ref, sha = sha, auth_user = auth_user, password = password, host = host ) } #' @export remote_download.bitbucket_remote <- function(x, quiet = FALSE) { if (!quiet) { message("Downloading bitbucket repo ", x$username, "/", x$repo, "@", x$ref) } dest <- tempfile(fileext = paste0(".tar.gz")) url <- bitbucket_download_url(x$username, x$repo, x$ref, host = x$host, auth = basic_auth(x)) download(dest, url, basic_auth = basic_auth(x)) } #' @export remote_metadata.bitbucket_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { if (!is.null(bundle)) { # Might be able to get from archive sha <- git_extract_sha1_tar(bundle) } else if (is.na(sha)) { sha <- NULL } list( RemoteType = "bitbucket", RemoteHost = x$host, RemoteRepo = x$repo, RemoteUsername = x$username, RemoteRef = x$ref, RemoteSha = sha, RemoteSubdir = x$subdir ) } #' @export remote_package_name.bitbucket_remote <- function(remote, ...) { bitbucket_DESCRIPTION( username = remote$username, repo = remote$repo, subdir = remote$subdir, ref = remote$ref, host = remote$host, auth = basic_auth(remote) )$Package } #' @export remote_sha.bitbucket_remote <- function(remote, ...) { bitbucket_commit(username = remote$username, repo = remote$repo, host = remote$host, ref = remote$ref, auth = basic_auth(remote))$hash %||% NA_character_ } #' @export format.bitbucket_remote <- function(x, ...) { "Bitbucket" } bitbucket_commit <- function(username, repo, ref = "master", host = "api.bitbucket.org/2.0", auth = NULL) { url <- build_url(host, "repositories", username, repo, "commit", ref) tmp <- tempfile() download(tmp, url, basic_auth = auth) json$parse_file(tmp) } bitbucket_DESCRIPTION <- function(username, repo, subdir = NULL, ref = "master", host = "https://api.bitbucket.org/2.0", auth = NULL,...) 
{ url <- build_url(host, "repositories", username, repo, "src", ref, subdir, "DESCRIPTION") tmp <- tempfile() download(tmp, url, basic_auth = auth) read_dcf(tmp) } basic_auth <- function(x) { if (!is.null(x$password)) { list( user = x$auth_user %||% x$username, password = x$password ) } else { NULL } } bitbucket_download_url <- function(username, repo, ref = "master", host = "api.bitbucket.org/2.0", auth = NULL) { url <- build_url(host, "repositories", username, repo) tmp <- tempfile() download(tmp, url, basic_auth = auth) paste0(build_url(json$parse_file(tmp)$links$html$href, "get", ref), ".tar.gz") } bitbucket_password <- function(quiet = TRUE) { pass <- Sys.getenv("BITBUCKET_PASSWORD") if (identical(pass, "")) return(NULL) if (!quiet) message("Using bitbucket password from envvar BITBUCKET_PASSWORD") pass } bitbucket_user <- function(quiet = TRUE) { user <- Sys.getenv("BITBUCKET_USER") if (identical(user, "")) return(NULL) if (!quiet) message("Using bitbucket user from envvar BITBUCKET_USER") user } remotes/R/circular.R0000644000176200001440000000124613406450626014071 0ustar liggesusers ## A environment to hold which packages are being installed so packages ## with circular dependencies can be skipped the second time. installing <- new.env(parent = emptyenv()) is_root_install <- function() is.null(installing$packages) exit_from_root_install <- function() installing$packages <- NULL check_for_circular_dependencies <- function(pkgdir, quiet) { pkgdir <- normalizePath(pkgdir) pkg <- get_desc_field(file.path(pkgdir, "DESCRIPTION"), "Package") if (pkg %in% installing$packages) { if (!quiet) message("Skipping ", pkg, ", it is already being installed") TRUE } else { installing$packages <- c(installing$packages, pkg) FALSE } } remotes/R/install-local.R0000644000176200001440000000576713502730612015030 0ustar liggesusers #' Install a package from a local file #' #' This function is vectorised so you can install multiple packages in #' a single command. 
#' #' @param path path to local directory, or compressed file (tar, zip, tar.gz #' tar.bz2, tgz2 or tbz) #' @inheritParams install_url #' @inheritParams install_github #' @export #' @family package installation #' @examples #' \dontrun{ #' dir <- tempfile() #' dir.create(dir) #' pkg <- download.packages("testthat", dir, type = "source") #' install_local(pkg[, 2]) #' } install_local <- function(path = ".", subdir = NULL, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = !is_binary_pkg(path), build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { remotes <- lapply(path, local_remote, subdir = subdir) install_remotes(remotes, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } local_remote <- function(path, subdir = NULL, branch = NULL, args = character(0), ...) { remote("local", path = normalizePath(path), subdir = subdir ) } #' @export remote_download.local_remote <- function(x, quiet = FALSE) { # Already downloaded - just need to copy to tempdir() bundle <- tempfile() dir.create(bundle) suppressWarnings( res <- file.copy(x$path, bundle, recursive = TRUE) ) if (!all(res)) { stop("Could not copy `", x$path, "` to `", bundle, "`", call. = FALSE) } # file.copy() creates directory inside of bundle dir(bundle, full.names = TRUE)[1] } #' @export remote_metadata.local_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { list( RemoteType = "local", RemoteUrl = x$path, RemoteSubdir = x$subdir ) } #' @export remote_package_name.local_remote <- function(remote, ...) 
{ is_tarball <- !dir.exists(remote$path) if (is_tarball) { # Assume the name is the name of the tarball return(sub("_.*$", "", basename(remote$path))) } description_path <- file.path(remote$path, "DESCRIPTION") read_dcf(description_path)$Package } #' @export remote_sha.local_remote <- function(remote, ...) { is_tarball <- !dir.exists(remote$path) if (is_tarball) { return(NA_character_) } read_dcf(file.path(remote$path, "DESCRIPTION"))$Version } #' @export format.local_remote <- function(x, ...) { "local" } remotes/R/dcf.R0000644000176200001440000000053313243326354013016 0ustar liggesusersread_dcf <- function(path) { fields <- colnames(read.dcf(path)) as.list(read.dcf(path, keep.white = fields)[1, ]) } write_dcf <- function(path, desc) { write.dcf( rbind(unlist(desc)), file = path, keep.white = names(desc), indent = 0 ) } get_desc_field <- function(path, field) { dcf <- read_dcf(path) dcf[[field]] } remotes/R/install-bioc.R0000644000176200001440000002055513577155371014661 0ustar liggesusers#' Install a development package from the Bioconductor git repository #' #' This function requires `git` to be installed on your system in order to #' be used. #' #' It is vectorised so you can install multiple packages with #' a single command. #' #' This is intended as an aid for Bioconductor developers. If you want to #' install the release version of a Bioconductor package one can use the #' `BiocManager` package. #' @inheritParams install_git #' @param repo Repository address in the format #' `[username:password@@][release/]repo[#commit]`. Valid values for #' the release are \sQuote{devel}, #' \sQuote{release} (the default if none specified), or numeric release #' numbers (e.g. \sQuote{3.3}). #' @param mirror The Bioconductor git mirror to use #' @param ... Other arguments passed on to [utils::install.packages()]. 
#' @inheritParams install_github #' @export #' @family package installation #' @examples #' \dontrun{ #' install_bioc("SummarizedExperiment") #' install_bioc("release/SummarizedExperiment") #' install_bioc("3.3/SummarizedExperiment") #' install_bioc("SummarizedExperiment#abc123") #' install_bioc("user:password@release/SummarizedExperiment") #' install_bioc("user:password@devel/SummarizedExperiment") #' install_bioc("user:password@SummarizedExperiment#abc123") #'} install_bioc <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")), git = c("auto", "git2r", "external"), dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { remotes <- lapply(repo, bioc_remote, mirror = mirror, git = match.arg(git)) install_remotes(remotes, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } bioc_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")), git = c("auto", "git2r", "external"), ...) { git <- match.arg(git) if (git == "auto") { git <- if (!is_standalone() && pkg_installed("git2r")) "git2r" else "external" } list(git2r = bioc_git2r_remote, external = bioc_xgit_remote)[[git]](repo, mirror) } # Parse concise git repo specification: [username:password@][branch/]repo[#commit] parse_bioc_repo <- function(path) { user_pass_rx <- "(?:([^:]+):([^:@]+)@)?" release_rx <- "(?:(devel|release|[0-9.]+)/)?" repo_rx <- "([^/@#]+)" commit_rx <- "(?:[#]([a-zA-Z0-9]+))?" 
bioc_rx <- sprintf("^(?:%s%s%s%s|(.*))$", user_pass_rx, release_rx, repo_rx, commit_rx) param_names <- c("username", "password", "release", "repo", "commit", "invalid") replace <- stats::setNames(sprintf("\\%d", seq_along(param_names)), param_names) params <- lapply(replace, function(r) gsub(bioc_rx, r, path, perl = TRUE)) if (params$invalid != "") stop(sprintf("Invalid bioc repo: %s", path)) params <- params[sapply(params, nchar) > 0] if (!is.null(params$release) && !is.null(params$commit)) { stop("release and commit should not both be specified") } params } bioc_git2r_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages"))) { meta <- parse_bioc_repo(repo) branch <- bioconductor_branch(meta$release, meta$sha) if (!is.null(meta$username) && !is.null(meta$password)) { meta$credentials <- git2r::cred_user_pass(meta$username, meta$password) } remote("bioc_git2r", mirror = mirror, repo = meta$repo, release = meta$release %||% "release", sha = meta$commit, branch = branch, credentials = meta$credentials ) } bioc_xgit_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages"))) { meta <- parse_bioc_repo(repo) branch <- bioconductor_branch(meta$release, meta$sha) if (!is.null(meta$username) && !is.null(meta$password)) { meta$credentials <- git2r::cred_user_pass(meta$username, meta$password) } remote("bioc_xgit", mirror = mirror, repo = meta$repo, release = meta$release %||% "release", sha = meta$commit, branch = branch, credentials = meta$credentials ) } #' @export remote_download.bioc_git2r_remote <- function(x, quiet = FALSE) { url <- paste0(x$mirror, "/", x$repo) if (!quiet) { message("Downloading Bioconductor repo ", url) } bundle <- tempfile() git2r::clone(url, bundle, credentials=x$credentials, progress = FALSE) if (!is.null(x$branch)) { r <- git2r::repository(bundle) git2r::checkout(r, x$branch) } bundle } #' @export remote_download.bioc_xgit_remote <- function(x, quiet = 
FALSE) { url <- paste0(x$mirror, "/", x$repo) if (!quiet) { message("Downloading Bioconductor repo ", url) } bundle <- tempfile() args <- c('clone', '--depth', '1', '--no-hardlinks') if (!is.null(x$branch)) { args <- c(args, "--branch", x$branch) } args <- c(args, x$args, url, bundle) git(paste0(args, collapse = " "), quiet = quiet) bundle } #' @export remote_metadata.bioc_git2r_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { url <- paste0(x$mirror, "/", x$repo) if (!is.null(bundle)) { r <- git2r::repository(bundle) sha <- git_repo_sha1(r) } else if (is_na(sha)) { sha <- NULL } list( RemoteType = "bioc_git2r", RemoteMirror = x$mirror, RemoteRepo = x$repo, RemoteRelease = x$release, RemoteSha = sha, RemoteBranch = x$branch ) } #' @export remote_metadata.bioc_xgit_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) { if (is_na(sha)) { sha <- NULL } list( RemoteType = "bioc_xgit", RemoteMirror = x$mirror, RemoteRepo = x$repo, RemoteRelease = x$release, RemoteSha = sha, RemoteBranch = x$branch, RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " ") ) } #' @export remote_package_name.bioc_git2r_remote <- function(remote, ...) { remote$repo } #' @export remote_package_name.bioc_xgit_remote <- function(remote, ...) { remote$repo } #' @export remote_sha.bioc_git2r_remote <- function(remote, ...) { tryCatch({ url <- paste0(remote$mirror, "/", remote$repo) res <- git2r::remote_ls(url, credentials=remote$credentials) found <- grep(pattern = paste0("/", remote$branch), x = names(res)) if (length(found) == 0) { return(NA_character_) } unname(res[found[1]]) }, error = function(e) NA_character_) } remote_sha.bioc_xgit_remote <- function(remote, ...) 
{ url <- paste0(remote$mirror, "/", remote$repo) ref <- remote$branch refs <- git(paste("ls-remote", url, ref)) refs_df <- read.delim(text = refs, stringsAsFactors = FALSE, sep = "\t", header = FALSE) names(refs_df) <- c("sha", "ref") refs_df$sha[[1]] %||% NA_character_ } bioconductor_branch <- function(release, sha) { if (!is.null(sha)) { sha } else { if (is.null(release)) { release <- "release" } if (release == "release") { release <- bioconductor_release() } switch( tolower(release), devel = "master", paste0("RELEASE_", gsub("\\.", "_", release)) ) } } bioconductor_release <- function() { tmp <- tempfile() download(tmp, download_url("bioconductor.org/config.yaml"), quiet = TRUE) gsub("release_version:[[:space:]]+\"([[:digit:].]+)\"", "\\1", grep("release_version:", readLines(tmp), value = TRUE)) } #' @export format.bioc_git2r_remote <- function(x, ...) { "Bioc" } #' @export format.bioc_xgit_remote <- function(x, ...) { "Bioc" } # sha of most recent commit git_repo_sha1 <- function(r) { rev <- git2r::repository_head(r) if (is.null(rev)) { return(NULL) } if (git2r::is_commit(rev)) { rev$sha } else { git2r::branch_target(rev) } } remotes/R/install-version.R0000644000176200001440000001046413526527443015425 0ustar liggesusers #' Install specified version of a CRAN package. #' #' If you are installing an package that contains compiled code, you will #' need to have an R development environment installed. You can check #' if you do by running `devtools::has_devel` (you need the #' `devtools` package for this). #' #' @export #' @family package installation #' @param package package name #' @param version If the specified version is NULL or the same as the most #' recent version of the package, this function simply calls #' [utils::install.packages()]. Otherwise, it looks at the list of #' archived source tarballs and tries to install an older version instead. #' @param ... Other arguments passed on to [utils::install.packages()]. 
#' @inheritParams utils::install.packages #' @inheritParams install_github #' @author Jeremy Stephens #' @importFrom utils available.packages contrib.url install.packages install_version <- function(package, version = NULL, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = FALSE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = "source", ...) { if (!identical(type, "source")) { stop("`type` must be 'source' for `install_version()`", call. = FALSE) } url <- download_version_url(package, version, repos, type) res <- install_url(url, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) lib <- list(...)$lib %||% .libPaths() # Remove Metadata from installed package add_metadata( system.file(package = package, lib.loc = lib), list(RemoteType = NULL, RemoteUrl = NULL, RemoteSubdir = NULL)) invisible(res) } package_find_repo <- function(package, repos) { for (repo in repos) { if (length(repos) > 1) message("Trying ", repo) archive <- tryCatch({ con <- gzcon(url(sprintf("%s/src/contrib/Meta/archive.rds", repo), "rb")) on.exit(close(con)) readRDS(con) }, warning = function(e) list(), error = function(e) list()) info <- archive[[package]] if (!is.null(info)) { info$repo <- repo return(info) } } stop(sprintf("couldn't find package '%s'", package)) } #' Download a specified version of a CRAN package #' #' It downloads the package to a temporary file, and #' returns the name of the file. #' #' @inheritParams install_version #' @return Name of the downloaded file. #' #' @export download_version <- function(package, version = NULL, repos = getOption("repos"), type = getOption("pkgType"), ...) 
{
  # Resolve the URL of the requested version, then fetch it into a
  # temporary file via download(); the downloaded file's path is returned.
  url <- download_version_url(package, version, repos, type)
  download(path = tempfile(), url = url)
}

# Compute the download URL for `package` at `version`.
# If `version` is NULL or equals the version currently available in
# `repos`, the regular repository tarball URL is returned; otherwise the
# CRAN-style archive (src/contrib/Archive) is consulted via
# package_find_repo().
download_version_url <- function(package, version, repos, type) {

  contriburl <- contrib.url(repos, type)
  available <- available.packages(contriburl)

  if (package %in% row.names(available)) {
    current.version <- available[package, 'Version']
    if (is.null(version) || version == current.version) {
      # Latest version requested and present in a repo: use the normal
      # (non-archive) tarball location.
      row <- available[which(rownames(available) == package)[1], ]
      return(paste0(
        row[["Repository"]],
        "/",
        row[["Package"]],
        "_",
        row[["Version"]],
        ".tar.gz"
      ))
    }
  }

  # Older (or archived-only) version: look it up in the repo archive
  # metadata. Row names of `info` are paths like "pkg/pkg_1.0.tar.gz".
  info <- package_find_repo(package, repos)

  if (is.null(version)) {
    # Grab the latest one: only happens if pulled from CRAN
    package.path <- row.names(info)[nrow(info)]
  } else {
    package.path <- paste(package, "/", package, "_", version, ".tar.gz", sep = "")
    if (!(package.path %in% row.names(info))) {
      stop(sprintf("version '%s' is invalid for package '%s'", version, package))
    }
  }

  paste(info$repo[1L], "/src/contrib/Archive/", package.path, sep = "")
}
remotes/R/devel.R0000644000176200001440000000415213354676455013377 0ustar liggesusers
## The checking code looks for the objects in the package namespace, so defining
## dll here removes the following NOTE
## Registration problem:
##   Evaluating ‘dll$foo’ during check gives error
##   ‘object 'dll' not found’:
## .C(dll$foo, 0L)
## See https://github.com/wch/r-source/blob/d4e8fc9832f35f3c63f2201e7a35fbded5b5e14c/src/library/tools/R/QC.R##L1950-L1980
## Setting the class is needed to avoid a note about returning the wrong class.
## The local object is found first in the actual call, so current behavior is
## unchanged.
dll <- list(foo = structure(list(), class = "NativeSymbolInfo"))

# TRUE if a working C toolchain is available (see has_devel2() below),
# FALSE otherwise; never throws.
has_devel <- function() {
  tryCatch(
    has_devel2(),
    error = function(e) FALSE
  )
}

## This is similar to devtools:::has_devel(), with some
## very minor differences.
# Check for a working development environment by compiling, loading and
# calling a trivial C function. Errors if any step fails, returns TRUE on
# success. All artifacts (C source, shared library, loaded DLL) are
# cleaned up via chained on.exit() handlers.
has_devel2 <- function() {
  foo_path <- file.path(tempfile(fileext = ".c"))
  cat("void foo(int *bar) { *bar=1; }\n", file = foo_path)
  on.exit(unlink(foo_path))

  # R CMD SHLIB in the temp directory; errors propagate to the caller.
  R(c("CMD", "SHLIB", basename(foo_path)), dirname(foo_path))
  dylib <- sub("\\.c$", .Platform$dynlib.ext, foo_path)
  on.exit(unlink(dylib), add = TRUE)

  dll <- dyn.load(dylib)
  on.exit(dyn.unload(dylib), add = TRUE)

  # Call foo(0L) and check that it wrote 1 back through the pointer.
  stopifnot(.C(dll$foo, 0L)[[1]] == 1L)
  TRUE
}

# Warn that `pkgdir` contains compiled code but no suitable compiler was
# found, with platform-specific installation advice.
missing_devel_warning <- function(pkgdir) {
  # Best-effort package name for the message; "" if DESCRIPTION is missing
  # or unreadable.
  pkgname <- tryCatch(
    get_desc_field(file.path(pkgdir, "DESCRIPTION"), "Package"),
    error = function(e) NULL
  ) %||% ""

  sys <- sys_type()

  warning(
    "Package ",
    pkgname,
    " has compiled code, but no suitable ",
    "compiler(s) were found. Installation will likely fail.\n ",
    if (sys == "windows") {
      c("Install Rtools (https://cran.r-project.org/bin/windows/Rtools/).",
        "Then use the pkgbuild package, or make sure that Rtools in the PATH.")
    },
    if (sys == "macos") "Install XCode and make sure it works.",
    if (sys == "linux") "Install compilers via your Linux package manager."
  )
}

# Run `R <args>` (the same R installation running this code) in directory
# `path`, with a minimal, reproducible set of startup flags.
R <- function(args, path = tempdir()) {
  r <- file.path(R.home("bin"), "R")

  args <- c(
    "--no-site-file", "--no-environ", "--no-save",
    "--no-restore", "--quiet",
    args
  )

  system_check(r, args, path = path)
}
remotes/R/submodule.R0000644000176200001440000000661213621300305014251 0ustar liggesusers
# Parse a .gitmodules file into a data frame of submodule definitions.
# `file` may be a path, or the file's contents as a single string
# containing newlines.
parse_submodules <- function(file) {
  if (grepl("\n", file)) {
    x <- strsplit(file, "\n")[[1]]
  } else {
    x <- readLines(file)
  }

  # https://git-scm.com/docs/git-config#_syntax
  # Subsection names are case sensitive and can contain any characters except
  # newline and the null byte.
# Doublequote " and backslash can be included by
# escaping them as \" and \\
  double_quoted_string_with_escapes <- '(?:\\\\.|[^"])*'

  # Extract the [submodule "<name>"] section headers.
  # FIX: the named capture group `(?<submodule>...)` had lost its name in
  # this copy of the file (extraction stripped angle-bracketed text); the
  # `$submodule` access below requires the named group, so it is restored.
  section_names <- re_match(
    x,
    sprintf('^[[:space:]]*\\[submodule "(?<submodule>%s)"\\][[:space:]]*$', double_quoted_string_with_escapes)
  )$submodule

  # If no sections found return the empty list
  if (all(is.na(section_names))) {
    return(list())
  }

  # Extract name = value
  # The variable names are case-insensitive, allow only alphanumeric characters
  # and -, and must start with an alphabetic character.
  variable_name <- "[[:alpha:]][[:alnum:]-]*"
  # FIX: named groups `(?<name>...)` and `(?<value>...)` restored (see
  # above); the columns are selected by name just below. A stray trailing
  # comma in this call (an empty argument) was dropped as well.
  mapping_values <- re_match(
    x,
    sprintf('^[[:space:]]*(?<name>%s)[[:space:]]*=[[:space:]]*(?<value>.*)[[:space:]]*$', variable_name)
  )

  # Attach each name=value row to its enclosing section (carried forward),
  # then drop lines that were neither header nor mapping.
  values <- cbind(
    submodule = fill(section_names),
    mapping_values[c("name", "value")],
    stringsAsFactors = FALSE
  )
  values <- values[!is.na(mapping_values$.match), ]

  # path and valid url are required
  if (!all(c("path", "url") %in% values$name)) {
    warning("Invalid submodule definition, skipping submodule installation",
      immediate. = TRUE, call. = FALSE)
    return(list())
  }

  # Roughly equivalent to tidyr::spread(values, name, value)
  res <- stats::reshape(values, idvar = "submodule", timevar = "name", v.name = "value", direction = "wide")

  # Set the column names, reshape prepends `value.` to path, url and branch
  colnames(res) <- gsub("value[.]", "", colnames(res))

  # path and valid url are required
  if (any(is.na(res$url), is.na(res$path))) {
    warning("Invalid submodule definition, skipping submodule installation",
      immediate. = TRUE, call. = FALSE)
    return(list())
  }

  # branch is optional
  if (!exists("branch", res)) {
    res$branch <- NA_character_
  }

  # Remove unneeded attribute
  attr(res, "reshapeWide") <- NULL

  # Remove rownames
  rownames(res) <- NULL

  res
}

# Last-observation-carried-forward: each NA is replaced by the most
# recent non-NA value. Used to attach mapping lines to section headers.
# Adapted from https://stackoverflow.com/a/9517731/2055486
fill <- function(x) {
  not_missing <- !is.na(x)

  res <- x[not_missing]
  res[cumsum(not_missing)]
}

# Shallow-clone one submodule from `url` into `path`, optionally at
# `branch`.
update_submodule <- function(url, path, branch, quiet) {
  args <- c('clone', '--depth', '1', '--no-hardlinks --recurse-submodules')
  if (length(branch) > 0 && !is.na(branch)) {
    args <- c(args, "--branch", branch)
  }
  args <- c(args, url, path)

  git(paste0(args, collapse = " "), quiet = quiet)
}

# Clone all submodules declared in `source`'s .gitmodules. When `subdir`
# is given and no .gitmodules is found there, look in the repository
# root. Submodules excluded by .Rbuildignore are skipped.
update_submodules <- function(source, subdir, quiet) {
  file <- file.path(source, ".gitmodules")

  if (!file.exists(file)) {
    if (!is.null(subdir)) {
      nb_sub_folders <- lengths(strsplit(subdir, "/"))
      source <- do.call(file.path, as.list(c(source, rep("..", nb_sub_folders))))
    }
    file <- file.path(source, ".gitmodules")
    if (!file.exists(file)) {
      return()
    }
  }
  info <- parse_submodules(file)

  # Fixes #234
  if (length(info) == 0) {
    return()
  }

  to_ignore <- in_r_build_ignore(info$path, file.path(source, ".Rbuildignore"))

  if (!(length(info) > 0)) {
    return()
  }
  info <- info[!to_ignore, ]

  for (i in seq_len(NROW(info))) {
    update_submodule(info$url[[i]], file.path(source, info$path[[i]]), info$branch[[i]], quiet)
  }
}
remotes/R/json.R0000644000176200001440000000722013476462075013244 0ustar liggesusers
# Standalone JSON parser
#
# The purpose of this file is to provide a standalone JSON parser.
# It is quite slow and bare. If you need a proper parser please use the
# jsonlite package.
#
# The canonical location of this file is in the remotes package:
# https://github.com/r-lib/remotes/blob/master/R/json.R
#
# API:
# parse(text)
# parse_file(filename)
#
# NEWS:
# - 2019/05/15 First standalone version

# Minimal recursive-descent JSON parser, exposed as a list with `parse()`
# and `parse_file()`; implementation details live in the enclosing
# environment (reachable via `.internal`).
json <- local({

  # Split a JSON document into lexical tokens: strings, numbers, the
  # null/false/true keywords, and single punctuation characters.
  # Whitespace runs are matched so they tokenize cleanly, then discarded.
  tokenize_json <- function(text) {
    text <- paste(text, collapse = "\n")

    ESCAPE <- '(\\\\[^u[:cntrl:]]|\\\\u[0-9a-fA-F]{4})'
    CHAR <- '[^[:cntrl:]"\\\\]'

    STRING <- paste0('"', CHAR, '*(', ESCAPE, CHAR, '*)*"')
    NUMBER <- "-?(0|[1-9][0-9]*)([.][0-9]*)?([eE][+-]?[0-9]*)?"
    KEYWORD <- 'null|false|true'
    SPACE <- '[[:space:]]+'

    match <- gregexpr(
      pattern = paste0(
        STRING, "|", NUMBER, "|", KEYWORD, "|", SPACE, "|", "."
      ),
      text = text,
      perl = TRUE
    )

    # Drop the pure-whitespace tokens.
    grep("^\\s+$", regmatches(text, match)[[1]], value = TRUE, invert = TRUE)
  }

  # Raise a parse error with a "JSON: " prefix.
  throw <- function(...) {
    stop("JSON: ", ..., call. = FALSE)
  }

  # Parse a JSON file
  #
  # @param filename Path to the JSON file.
  # @return R objects corresponding to the JSON file.

  parse_file <- function(filename) {
    parse(readLines(filename, warn = FALSE))
  }

  # Parse a JSON string
  #
  # @param text JSON string.
  # @return R object corresponding to the JSON string.

  parse <- function(text) {

    tokens <- tokenize_json(text)

    # Cursor state shared by the nested parse_* helpers below.
    token <- NULL
    ptr <- 1

    # Advance the cursor; past end of input `token` becomes 'EOF'.
    read_token <- function() {
      if (ptr <= length(tokens)) {
        token <<- tokens[ptr]
        ptr <<- ptr + 1
      } else {
        token <<- 'EOF'
      }
    }

    # Dispatch on the current token: object, array, or scalar literal.
    # (The `name` argument is unused.)
    parse_value <- function(name = "") {
      if (token == "{") {
        parse_object()
      } else if (token == "[") {
        parse_array()
      } else if (token == "EOF" || (nchar(token) == 1 && ! token %in% 0:9)) {
        throw("EXPECTED value GOT ", token)
      } else {
        j2r(token)
      }
    }

    # Parse `{ "key": value, ... }` into a named list.
    parse_object <- function() {
      res <- structure(list(), names = character())

      read_token()

      ## Invariant: we are at the beginning of an element
      while (token != "}") {

        ## "key"
        if (grepl('^".*"$', token)) {
          key <- j2r(token)
        } else {
          throw("EXPECTED string GOT ", token)
        }

        ## :
        read_token()
        if (token != ":") { throw("EXPECTED : GOT ", token) }

        ## value
        read_token()
        # list() wrapper keeps NULL values as entries instead of deleting.
        res[key] <- list(parse_value())

        ## } or ,
        read_token()
        if (token == "}") {
          break
        } else if (token != ",") {
          throw("EXPECTED , or } GOT ", token)
        }
        read_token()
      }

      res
    }

    # Parse `[ value, ... ]` into an unnamed list.
    parse_array <- function() {
      res <- list()

      read_token()

      ## Invariant: we are at the beginning of an element
      while (token != "]") {
        ## value
        res <- c(res, list(parse_value()))

        ## ] or ,
        read_token()
        if (token == "]") {
          break
        } else if (token != ",") {
          throw("EXPECTED , GOT ", token)
        }
        read_token()
      }

      res
    }

    read_token()
    parse_value(tokens)
  }

  # Convert one scalar JSON token to its R value.
  j2r <- function(token) {
    if (token == "null") {
      NULL
    } else if (token == "true") {
      TRUE
    } else if (token == "false") {
      FALSE
    } else if (grepl('^".*"$', token)) {
      trimq(token)
    } else {
      as.numeric(token)
    }
  }

  # Strip the surrounding double quotes from a string token.
  trimq <- function(x) {
    sub('^"(.*)"$', "\\1", x)
  }

  structure(
    list(
      .internal = environment(),
      parse = parse,
      parse_file = parse_file
    ),
    class = c("standalone_json", "standalone"))
})
remotes/R/install-dev.R0000644000176200001440000000463513476462075014510 0ustar liggesusers
#' Install the development version of a package
#'
#' `install_dev()` retrieves the package DESCRIPTION from the CRAN mirror and
#' looks in the 'URL' and 'BugReports' fields for GitHub, GitLab or Bitbucket
#' URLs. It then calls the appropriate `install_()` function to install the
#' development package.
#'
#' @param package The package name to install.
#' @param cran_url The URL of the CRAN mirror to use, by default based on the
#'   'repos' option. If unset uses 'https://cloud.r-project.org'.
#' @param ...
#' Additional arguments passed to [install_github()],
#' [install_gitlab()], or [install_bitbucket()] functions.
#' @family package installation
#' @export
#' @examples
#' \dontrun{
#' # From GitHub
#' install_dev("dplyr")
#'
#' # From GitLab
#' install_dev("iemiscdata")
#'
#' # From Bitbucket
#' install_dev("argparser")
#' }
install_dev <- function(package, cran_url = getOption("repos")[["CRAN"]], ...) {
  if (is.null(cran_url) || identical(cran_url, "@CRAN@")) {
    cran_url <- "https://cloud.r-project.org"
  }

  # Split "pkg@ref" style specs into package and ref parts.
  refs <- dev_split_ref(package)
  url <- build_url(cran_url, "web", "packages", refs[["pkg"]], "DESCRIPTION")

  f <- tempfile()
  on.exit(unlink(f))

  download(f, url)
  desc <- read_dcf(f)

  # Candidate development-repo URLs come from URL and BugReports fields.
  url_fields <- c(desc$URL, desc$BugReports)

  if (length(url_fields) == 0) {
    stop("Could not determine development repository", call. = FALSE)
  }

  pkg_urls <- unlist(strsplit(url_fields, "[[:space:]]*,[[:space:]]*"))

  # Remove trailing "/issues" from the BugReports URL
  pkg_urls <- sub("/issues$", "", pkg_urls)

  valid_domains <- c("github[.]com", "gitlab[.]com", "bitbucket[.]org")

  # NOTE(review): the capture groups below appear to have lost their names
  # (`(?<domain>`, `(?<username>`, `(?<repo>`, `(?<subdir>`) in this copy
  # of the file -- the `[c("domain", ...)]` selection and `parts$domain`
  # dispatch below rely on named groups. Verify against upstream source.
  parts <-
    re_match(pkg_urls,
      sprintf("^https?://(?%s)/(?%s)/(?%s)(?:/(?%s))?",
        domain = paste0(valid_domains, collapse = "|"),
        username = "[^/]+",
        repo = "[^/@#]+",
        subdir = "[^/@$ ]+"
      )
    )[c("domain", "username", "repo", "subdir")]

  # Remove cases which don't match and duplicates

  parts <- unique(stats::na.omit(parts))

  # Exactly one distinct development repository must be identified.
  if (nrow(parts) != 1) {
    stop("Could not determine development repository", call. = FALSE)
  }

  full_ref <- paste0(
    paste0(c(parts$username, parts$repo, if (nzchar(parts$subdir)) parts$subdir), collapse = "/"),
    refs[["ref"]]
  )

  # Dispatch to the installer matching the hosting service.
  switch(parts$domain,
    github.com = install_github(full_ref, ...),
    gitlab.com = install_gitlab(full_ref, ...),
    bitbucket.org = install_bitbucket(full_ref, ...)
)
}
remotes/R/parse-git.R0000644000176200001440000000750313476462075014162 0ustar liggesusers
#' Parse a remote git repo specification
#'
#' A remote repo can be specified in two ways:
#' \describe{
#' \item{as a URL}{`parse_github_url()` handles HTTPS and SSH remote URLs
#' and various GitHub browser URLs}
#' \item{via a shorthand}{`parse_repo_spec()` handles this concise form:
#' `[username/]repo[/subdir][#pull|@ref|@*release]`}
#' }
#'
#' @param repo Character scalar, the repo specification.
#' @return List with members: `username`, `repo`, `subdir`
#' `ref`, `pull`, `release`, some which will be empty.
#'
#' @name parse-git-repo
#' @examples
#' parse_repo_spec("metacran/crandb")
#' parse_repo_spec("jimhester/covr#47") ## pull request
#' parse_repo_spec("jeroen/curl@v0.9.3") ## specific tag
#' parse_repo_spec("tidyverse/dplyr@*release") ## shorthand for latest release
#' parse_repo_spec("r-lib/remotes@550a3c7d3f9e1493a2ba") ## commit SHA
#' parse_repo_spec("igraph=igraph/rigraph") ## Different package name from repo name
#'
#' parse_github_url("https://github.com/jeroen/curl.git")
#' parse_github_url("git@github.com:metacran/crandb.git")
#' parse_github_url("https://github.com/jimhester/covr")
#' parse_github_url("https://github.example.com/user/repo.git")
#' parse_github_url("git@github.example.com:user/repo.git")
#'
#' parse_github_url("https://github.com/r-lib/remotes/pull/108")
#' parse_github_url("https://github.com/r-lib/remotes/tree/name-of-branch")
#' parse_github_url("https://github.com/r-lib/remotes/commit/1234567")
#' parse_github_url("https://github.com/r-lib/remotes/releases/latest")
#' parse_github_url("https://github.com/r-lib/remotes/releases/tag/1.0.0")
NULL

#' @export
#' @rdname parse-git-repo
parse_repo_spec <- function(repo) {
  # FIX: the named capture groups in the patterns below (`(?<package>`,
  # `(?<username>`, `(?<repo>`, `(?<subdir>`, `(?<ref>`, `(?<pull>`,
  # `(?<release>`) had lost their names in this copy of the file
  # (extraction stripped angle-bracketed text, leaving bare `(?` which is
  # not even a valid regex). The re_match() result is consumed by name
  # (`params$.match`, and the named components returned to callers), so
  # the names are restored here.
  package_name_rx <- "(?:(?<package>[[:alpha:]][[:alnum:].]*[[:alnum:]])=)?"
  username_rx <- "(?:(?<username>[^/]+)/)"
  repo_rx <- "(?<repo>[^/@#]+)"
  subdir_rx <- "(?:/(?<subdir>[^@#]*[^@#/])/?)?"
  ref_rx <- "(?:@(?<ref>[^*].*))"
  pull_rx <- "(?:#(?<pull>[0-9]+))"
  release_rx <- "(?:@(?<release>[*]release))"

  # A spec may carry at most one of @ref, #pull, @*release.
  ref_or_pull_or_release_rx <- sprintf(
    "(?:%s|%s|%s)?", ref_rx, pull_rx, release_rx
  )
  spec_rx <- sprintf(
    "^%s%s%s%s%s$",
    package_name_rx, username_rx, repo_rx, subdir_rx, ref_or_pull_or_release_rx
  )
  params <- as.list(re_match(text = repo, pattern = spec_rx))

  if (is.na(params$.match)) {
    stop(sprintf("Invalid git repo specification: '%s'", repo))
  }

  # Drop the matcher's bookkeeping entries (".match", ".text").
  params[grepl("^[^\\.]", names(params))]
}

#' @export
#' @rdname parse-git-repo
parse_github_repo_spec <- parse_repo_spec

#' @export
#' @rdname parse-git-repo
parse_github_url <- function(repo) {
  prefix_rx <- "(?:github[^/:]+[/:])"
  # FIX: named capture groups restored here as well (see parse_repo_spec).
  username_rx <- "(?:(?<username>[^/]+)/)"
  repo_rx <- "(?<repo>[^/@#]+)"
  ref_rx <- "(?:(?:tree|commit|releases/tag)/(?<ref>.+$))"
  pull_rx <- "(?:pull/(?<pull>.+$))"
  release_rx <- "(?:releases/)(?<release>.+$)"

  ref_or_pull_or_release_rx <- sprintf(
    "(?:/(%s|%s|%s))?", ref_rx, pull_rx, release_rx
  )
  url_rx <- sprintf(
    "%s%s%s%s",
    prefix_rx, username_rx, repo_rx, ref_or_pull_or_release_rx
  )
  params <- as.list(re_match(text = repo, pattern = url_rx))

  if (is.na(params$.match)) {
    stop(sprintf("Invalid GitHub URL: '%s'", repo))
  }

  # Plain repo URLs may carry a ".git" suffix; strip it.
  if (params$ref == "" && params$pull == "" && params$release == "") {
    params$repo <- gsub("\\.git$", "", params$repo)
  }

  # ".../releases/latest" is the browser URL for the latest release.
  if (params$release == "latest") {
    params$release <- "*release"
  }

  params[grepl("^[^\\.]", names(params))]
}

# Parse `repo` with the URL parser when it looks like a GitHub URL, with
# the shorthand parser otherwise, then normalize pull/release entries
# into the `ref` slot (as github_pull()/github_release() markers).
parse_git_repo <- function(repo) {

  if (grepl("^https://github|^git@github", repo)) {
    params <- parse_github_url(repo)
  } else {
    params <- parse_repo_spec(repo)
  }
  # Drop empty components.
  params <- params[viapply(params, nchar) > 0]

  if (!is.null(params$pull)) {
    params$ref <- github_pull(params$pull)
    params$pull <- NULL
  }

  if (!is.null(params$release)) {
    params$ref <- github_release()
    params$release <- NULL
  }

  params
}
remotes/R/bioc-standalone.R0000644000176200001440000002353213621266367015337 0ustar liggesusers
#' Tools for Bioconductor versions and repositories
#'
#' \section{API:}
#'
#' ```
#' get_yaml_config(forget = FALSE)
#'
#' set_yaml_config(text)
#'
#' get_release_version(forget = FALSE)
#' get_devel_version(forget = FALSE)
#'
#' get_version_map(forget = FALSE)
#' get_matching_bioc_version(r_version = getRversion(), forget = FALSE)
#' get_bioc_version(r_version = getRversion(), forget = FALSE)
#'
#' get_repos(bioc_version = "auto", forget = FALSE)
#' ```
#'
#' * `forget`: Whether to forget the cached version of the Bioconductor
#'   config YAML file and download it again.
#' * `text`: character vector (linewise) or scalar, the contents of the
#'   `config.yaml` file, if obtained externally, to be used as a cached
#'   version in the future.
#' * `r_version`: R version string, or `package_version` object.
#' * `bioc_version`: Bioc version string or `package_version` object,
#'   or the string `"auto"` to use the one matching the current R version.
#'
#' `get_yaml_config()` returns the raw contents of the `config.yaml` file,
#' linewise. It is typically not needed, except if one needs information
#' that cannot be surfaced via the other API functions.
#'
#' `set_yaml_config()` can be used to _set_ the contents of the
#' `config.yaml` file. This is useful if one has already obtained it
#' externally, but wants to use the obtained file with the rest of the
#' bioc standalone code.
#'
#' `get_release_version()` returns the version of the current Bioconductor
#' release.
#'
#' `get_devel_version()` returns the version of the current development
#' version of Bioconductor.
#'
#' `get_version_map()` returns the mapping between R versions and
#' Bioconductor versions. Note that this is not a one to one mapping.
#' E.g. currently R `3.6.x` maps to both Bioc `3.9` (Bioc release) and
#' `3.10` (Bioc devel); and also Bioc `3.10` maps to both R `3.6.x` and
#' R `3.7.x` (current R-devel). It returns a data frame with three columns:
#' `bioc_version`, `r_version` and `bioc_status`.
The first two columns #' contain `package_vesion` objects, the third is a factor with levels: #' `out-of-date`, `release`, `devel`, `future`. #' #' `get_matching_bioc_version()` returns the matching Bioc version for an #' R version. If the R version matches to both a released and a devel #' version, then the released version is chosen. #' #' `get_bioc_version()` returns the matching Bioc version for the #' specified R version. It does observe the `R_BIOC_VERSION` environment #' variable, which can be used to force a Bioconductor version. If this is #' not set, it just calls `get_matching_bioc_version()`. #' #' `get_repos()` returns the Bioc repositories of the specified Bioc #' version. It defaults to the Bioc version that matches the calling R #' version. It returns a named character vector. #' #' \section{NEWS:} #' * 2019-05-30 First version in remotes. #' #' #' @name bioconductor #' @keywords internal #' @noRd NULL bioconductor <- local({ # ------------------------------------------------------------------- # Configuration that does not change often config_url <- "https://bioconductor.org/config.yaml" builtin_map <- list( "2.1" = package_version("1.6"), "2.2" = package_version("1.7"), "2.3" = package_version("1.8"), "2.4" = package_version("1.9"), "2.5" = package_version("2.0"), "2.6" = package_version("2.1"), "2.7" = package_version("2.2"), "2.8" = package_version("2.3"), "2.9" = package_version("2.4"), "2.10" = package_version("2.5"), "2.11" = package_version("2.6"), "2.12" = package_version("2.7"), "2.13" = package_version("2.8"), "2.14" = package_version("2.9"), "2.15" = package_version("2.11"), "3.0" = package_version("2.13"), "3.1" = package_version("3.0"), "3.2" = package_version("3.2"), "3.3" = package_version("3.4"), "3.4" = package_version("3.6"), "3.5" = package_version("3.8"), "3.6" = package_version("3.10") ) # ------------------------------------------------------------------- # Cache devel_version <- NULL release_version <- NULL version_map <- 
NULL yaml_config <- NULL # ------------------------------------------------------------------- # API get_yaml_config <- function(forget = FALSE) { if (forget || is.null(yaml_config)) { new <- tryCatch(read_url(config_url), error = function(x) x) if (inherits(new, "error")) { http_url <- sub("^https", "http", config_url) new <- tryCatch(read_url(http_url), error = function(x) x) } if (inherits(new, "error")) stop(new) yaml_config <<- new } yaml_config } set_yaml_config <- function(text) { if (length(text) == 1) text <- strsplit(text, "\n", fixed = TRUE)[[1]] yaml_config <<- text } get_release_version <- function(forget = FALSE) { if (forget || is.null(release_version)) { yaml <- get_yaml_config(forget) pattern <- "^release_version: \"(.*)\"" release_version <<- package_version( sub(pattern, "\\1", grep(pattern, yaml, value=TRUE)) ) } release_version } get_devel_version <- function(forget = FALSE) { if (forget || is.null(devel_version)) { yaml <- get_yaml_config(forget) pattern <- "^devel_version: \"(.*)\"" devel_version <<- package_version( sub(pattern, "\\1", grep(pattern, yaml, value=TRUE)) ) } devel_version } get_version_map <- function(forget = FALSE) { if (forget || is.null(version_map)) { txt <- get_yaml_config(forget) grps <- grep("^[^[:blank:]]", txt) start <- match(grep("r_ver_for_bioc_ver", txt), grps) map <- txt[seq(grps[start] + 1, grps[start + 1] - 1)] map <- trimws(gsub("\"", "", sub(" #.*", "", map))) pattern <- "(.*): (.*)" bioc <- package_version(sub(pattern, "\\1", map)) r <- package_version(sub(pattern, "\\2", map)) status <- rep("out-of-date", length(bioc)) release <- get_release_version() devel <- get_devel_version() status[bioc == release] <- "release" status[bioc == devel] <- "devel" # append final version for 'devel' R bioc <- c( bioc, max(bioc) ) r <- c(r, package_version(paste(unlist(max(r)) + 0:1, collapse = "."))) status <- c(status, "future") version_map <<- rbind( .VERSION_MAP_SENTINEL, data.frame( bioc_version = bioc, r_version = r, 
bioc_status = factor( status, levels = c("out-of-date", "release", "devel", "future") ) ) ) } version_map } get_matching_bioc_version <- function(r_version = getRversion(), forget = FALSE) { minor <- as.character(get_minor_r_version(r_version)) if (minor %in% names(builtin_map)) return(builtin_map[[minor]]) # If we are not in the map, then we need to look this up in # YAML data. map <- get_version_map(forget = forget) mine <- match(package_version(minor), map$r_version) if (!is.na(mine)) return(map$bioc_version[mine]) # If it is not even in the YAML, then it must be some very old # or very new version. If old, we fail. If new, we assume bioc-devel. if (package_version(minor) < "2.1") { stop("R version too old, cannot run Bioconductor") } get_devel_version() } get_bioc_version <- function(r_version = getRversion(), forget = FALSE) { if (nzchar(v <- Sys.getenv("R_BIOC_VERSION", ""))) { return(package_version(v)) } get_matching_bioc_version(r_version, forget = forget) } get_repos <- function(bioc_version = "auto", forget = FALSE) { if (identical(bioc_version, "auto")) { bioc_version <- get_bioc_version(getRversion(), forget) } else { bioc_version <- package_version(bioc_version) } mirror <- Sys.getenv("R_BIOC_MIRROR", "https://bioconductor.org") mirror <- getOption("BioC_mirror", mirror) repos <- c( BioCsoft = "{mirror}/packages/{bv}/bioc", BioCann = "{mirror}/packages/{bv}/data/annotation", BioCexp = "{mirror}/packages/{bv}/data/experiment", BioCworkflows = if (bioc_version >= "3.7") "{mirror}/packages/{bv}/workflows", BioCextra = if (bioc_version <= "3.5") "{mirror}/packages/{bv}/extra" ) ## It seems that if a repo is not available yet for bioc-devel, ## they redirect to the bioc-release version, so we do not need to ## parse devel_repos from the config.yaml file sub("{mirror}", mirror, fixed = TRUE, sub("{bv}", bioc_version, repos, fixed = TRUE)) } # ------------------------------------------------------------------- # Internals read_url <- function(url) { tmp <- 
tempfile() on.exit(unlink(tmp), add = TRUE) suppressWarnings(download.file(url, tmp, quiet = TRUE)) if (!file.exists(tmp) || file.info(tmp)$size == 0) { stop("Failed to download `", url, "`") } readLines(tmp, warn = FALSE) } .VERSION_SENTINEL <- local({ version <- package_version(list()) class(version) <- c("unknown_version", class(version)) version }) .VERSION_MAP_SENTINEL <- data.frame( bioc_version = .VERSION_SENTINEL, r_version = .VERSION_SENTINEL, bioc_status = factor( factor(), levels = c("out-of-date", "release", "devel", "future") ) ) get_minor_r_version <- function (x) { package_version(x)[,1:2] } # ------------------------------------------------------------------- structure( list( .internal = environment(), get_yaml_config = get_yaml_config, set_yaml_config = set_yaml_config, get_release_version = get_release_version, get_devel_version = get_devel_version, get_version_map = get_version_map, get_matching_bioc_version = get_matching_bioc_version, get_bioc_version = get_bioc_version, get_repos = get_repos ), class = c("standalone_bioc", "standalone")) }) remotes/R/package.R0000644000176200001440000000057113243326354013657 0ustar liggesusers load_pkg_description <- function(path) { path <- normalizePath(path) if (!is_dir(path)) { dir <- tempfile() path_desc <- untar_description(path, dir = dir) on.exit(unlink(dir, recursive = TRUE)) } else { path_desc <- file.path(path, "DESCRIPTION") } desc <- read_dcf(path_desc) names(desc) <- tolower(names(desc)) desc$path <- path desc } remotes/R/install-svn.R0000644000176200001440000001246713476462151014551 0ustar liggesusers #' Install a package from a SVN repository #' #' This function requires \command{svn} to be installed on your system in order to #' be used. #' #' It is vectorised so you can install multiple packages with #' a single command. #' #' @inheritParams install_git #' @param subdir A sub-directory within a svn repository that contains the #' package we are interested in installing. 
#' @param args A character vector providing extra options to pass on to
#'   \command{svn}.
#' @param revision svn revision, if omitted updates to latest
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams install_github
#' @family package installation
#' @export
#'
#' @examples
#' \dontrun{
#' install_svn("https://github.com/hadley/stringr/trunk")
#' install_svn("https://github.com/hadley/httr/branches/oauth")
#'}
install_svn <- function(url, subdir = NULL, args = character(0),
                        revision = NULL,
                        dependencies = NA,
                        upgrade = c("default", "ask", "always", "never"),
                        force = FALSE, quiet = FALSE,
                        build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                        build_manual = FALSE, build_vignettes = FALSE,
                        repos = getOption("repos"),
                        type = getOption("pkgType"),
                        ...) {

  # Vectorised over `url`: build one svn remote per URL, then install.
  remotes <- lapply(url, svn_remote, svn_subdir = subdir,
    revision = revision, args = args)

  install_remotes(remotes, args = args,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Construct an "svn" remote object.
svn_remote <- function(url, svn_subdir = NULL, revision = NULL,
                       args = character(0), ...) {
  remote("svn",
    url = url,
    svn_subdir = svn_subdir,
    revision = revision,
    args = args
  )
}

#' @export
remote_download.svn_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading svn repo ", x$url)
  }

  bundle <- tempfile()
  svn_binary_path <- svn_path()
  url <- x$url

  # `svn co [-r rev] [extra args] <url> <dir>`
  args <- "co"
  if (!is.null(x$revision)) {
    args <- c(args, "-r", x$revision)
  }
  args <- c(args, x$args, full_svn_url(x), bundle)

  if (!quiet) {
    message(shQuote(svn_binary_path), " ", paste0(args, collapse = " "))
  }

  request <- system2(svn_binary_path, args, stdout = FALSE, stderr = FALSE)

  # This is only looking for an error code above 0-success
  if (request > 0) {
    stop("There seems to be a problem retrieving this SVN-URL.", call. = FALSE)
  }

  in_dir(bundle, {
    if (!is.null(x$revision)) {
      # Pin the working copy to the requested revision.
      request <- system2(svn_binary_path, paste("update -r", x$revision), stdout = FALSE, stderr = FALSE)
      if (request > 0) {
        stop("There was a problem switching to the requested SVN revision", call. = FALSE)
      }
    }
  })

  bundle
}

#' @export
remote_metadata.svn_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  # Prefer the revision of the checked-out bundle; fall back to the
  # caller-supplied `sha`.
  if (!is.null(bundle)) {
    in_dir(bundle, {
      revision <- svn_revision()
    })
  } else {
    revision <- sha
  }

  list(
    RemoteType = "svn",
    RemoteUrl = x$url,
    RemoteSubdir = x$svn_subdir,
    RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " "),
    RemoteSha = revision # for compatibility with other remotes
  )
}

# Locate the svn executable: a user-supplied path, the PATH, or (on
# Windows) a couple of conventional install locations.
svn_path <- function(svn_binary_name = NULL) {
  # Use user supplied path
  if (!is.null(svn_binary_name)) {
    if (!file.exists(svn_binary_name)) {
      # FIX: was `.call = FALSE`; `.call` does not match stop()'s `call.`
      # argument, so it was swallowed into `...` and pasted onto the
      # error message instead of suppressing the call.
      stop("Path ", svn_binary_name, " does not exist", call. = FALSE)
    }
    return(svn_binary_name)
  }

  # Look on path
  svn_path <- Sys.which("svn")[[1]]
  if (svn_path != "") return(svn_path)

  # On Windows, look in common locations
  if (os_type() == "windows") {
    look_in <- c(
      "C:/Program Files/Svn/bin/svn.exe",
      "C:/Program Files (x86)/Svn/bin/svn.exe"
    )
    found <- file.exists(look_in)
    if (any(found)) return(look_in[found][1])
  }

  stop("SVN does not seem to be installed on your system.", call. = FALSE)
}

#' @export
remote_package_name.svn_remote <- function(remote, ...) {
  description_url <- file.path(full_svn_url(remote), "DESCRIPTION")
  tmp_file <- tempfile()
  # FIX: was `on.exit(rm(tmp_file))`, which only removed the local
  # *variable*; unlink() actually deletes the downloaded temp file.
  on.exit(unlink(tmp_file))
  response <- system2(svn_path(), paste("cat", description_url), stdout = tmp_file)
  if (!identical(response, 0L)) {
    # `svn cat` failed; the package name cannot be determined.
    return(NA_character_)
  }
  read_dcf(tmp_file)$Package
}

#' @export
remote_sha.svn_remote <- function(remote, ...)
{
  svn_revision(full_svn_url(remote))
}

# Query the revision of an SVN URL (or, with url = NULL, the current
# working copy) via `svn info --xml`.
svn_revision <- function(url = NULL, svn_binary_path = svn_path()) {
  request <- system2(svn_binary_path, paste("info --xml", url), stdout = TRUE)

  if (!is.null(attr(request, "status")) && !identical(attr(request, "status"), 0L)) {
    stop("There was a problem retrieving the current SVN revision", call. = FALSE)
  }

  # FIX: this pattern read `".*.*"` -- the angle-bracketed XML literal
  # (and with it the capture group) had been stripped from this copy of
  # the file, leaving an invalid "\\1" backreference. Reconstructed to
  # pull N out of the `<commit revision="N">` element of the
  # `svn info --xml` output (TRE regexes let `.` match newlines too).
  gsub(".*<commit[[:space:]]+revision=\"([[:digit:]]+)\">.*", "\\1",
       paste(collapse = "\n", request))
}

# Remote URL plus optional subdirectory.
full_svn_url <- function(x) {
  url <- x$url
  if (!is.null(x$svn_subdir)) {
    url <- file.path(url, x$svn_subdir)
  }
  url
}

format.svn_remote <- function(x, ...) {
  "SVN"
}
remotes/R/install-github.R0000644000176200001440000002040413621300305015203 0ustar liggesusers
#' Attempts to install a package directly from GitHub.
#'
#' This function is vectorised on `repo` so you can install multiple
#' packages in a single command.
#'
#' @param repo Repository address in the format
#'   `username/repo[/subdir][@@ref|#pull]`. Alternatively, you can
#'   specify `subdir` and/or `ref` using the respective parameters
#'   (see below); if both is specified, the values in `repo` take
#'   precedence.
#' @param ref Desired git reference. Could be a commit, tag, or branch
#'   name, or a call to [github_pull()]. Defaults to `"master"`.
#' @param subdir subdirectory within repo that contains the R package.
#' @param auth_token To install from a private repo, generate a personal
#'   access token (PAT) in "https://github.com/settings/tokens" and
#'   supply to this argument. This is safer than using a password because
#'   you can easily delete a PAT without affecting any others. Defaults to
#'   the `GITHUB_PAT` environment variable.
#' @param host GitHub API host to use. Override with your GitHub enterprise
#'   hostname, for example, `"github.hostname.com/api/v3"`.
#' @param force Force installation, even if the remote state has not changed
#'   since the previous install.
#' @inheritParams install_deps
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @details
#' If the repository uses submodules a command-line git client is required to
#' clone the submodules.
#' @family package installation
#' @export
#' @seealso [github_pull()]
#' @examples
#' \dontrun{
#' install_github("klutometis/roxygen")
#' install_github("wch/ggplot2")
#' install_github(c("rstudio/httpuv", "rstudio/shiny"))
#' install_github(c("hadley/httr@@v0.4", "klutometis/roxygen#142",
#'   "mfrasca/r-logging/pkg"))
#'
#' # To install from a private repo, use auth_token with a token
#' # from https://github.com/settings/tokens. You only need the
#' # repo scope. Best practice is to save your PAT in env var called
#' # GITHUB_PAT.
#' install_github("hadley/private", auth_token = "abc")
#'
#' }
install_github <- function(repo,
                           ref = "master",
                           subdir = NULL,
                           auth_token = github_pat(quiet),
                           host = "api.github.com",
                           dependencies = NA,
                           upgrade = c("default", "ask", "always", "never"),
                           force = FALSE,
                           quiet = FALSE,
                           build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                           build_manual = FALSE, build_vignettes = FALSE,
                           repos = getOption("repos"),
                           type = getOption("pkgType"),
                           ...) {

  # Vectorised over `repo`: build one remote object per spec, then
  # install them all.
  remotes <- lapply(repo, github_remote, ref = ref,
    subdir = subdir, auth_token = auth_token, host = host)

  install_remotes(remotes, auth_token = auth_token, host = host,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Build a "github" remote object for `repo`, resolving its ref (branch,
# tag, pull request or release marker) via github_resolve_ref().
github_remote <- function(repo, ref = "master", subdir = NULL,
                          auth_token = github_pat(), sha = NULL,
                          host = "api.github.com", ...) {

  meta <- parse_git_repo(repo)
  # A ref embedded in the spec takes precedence over the `ref` argument.
  meta <- github_resolve_ref(meta$ref %||% ref, meta, host = host, auth_token = auth_token)

  remote("github",
    host = host,
    package = meta$package,
    repo = meta$repo,
    subdir = meta$subdir %||% subdir,
    username = meta$username,
    ref = meta$ref,
    sha = sha,
    auth_token = auth_token
  )
}

#' @export
remote_download.github_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading GitHub repo ", x$username, "/", x$repo, "@", x$ref)
  }

  # Fetch the repo tarball for the resolved ref from the GitHub API.
  dest <- tempfile(fileext = paste0(".tar.gz"))
  src_root <- build_url(x$host, "repos", x$username, x$repo)
  src <- paste0(src_root, "/tarball/", utils::URLencode(x$ref, reserved = TRUE))

  download(dest, src, auth_token = x$auth_token)
}

#' @export
remote_metadata.github_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (!is.null(bundle)) {
    # Might be able to get from archive
    sha <- git_extract_sha1_tar(bundle)
  } else if (is_na(sha)) {
    sha <- NULL
  }

  list(
    RemoteType = "github",
    RemoteHost = x$host,
    RemotePackage = x$package,
    RemoteRepo = x$repo,
    RemoteUsername = x$username,
    RemoteRef = x$ref,
    RemoteSha = sha,
    RemoteSubdir = x$subdir,
    # Backward compatibility for packrat etc.
    GithubRepo = x$repo,
    GithubUsername = x$username,
    GithubRef = x$ref,
    GithubSHA1 = sha,
    GithubSubdir = x$subdir
  )
}

#' GitHub references
#'
#' Use as `ref` parameter to [install_github()].
#' Allows installing a specific pull request or the latest release.
#'
#' @param pull The pull request to install
#' @seealso [install_github()]
#' @rdname github_refs
#' @export
github_pull <- function(pull) structure(pull, class = "github_pull")

#' @rdname github_refs
#' @export
github_release <- function() structure(NA_integer_, class = "github_release")

# S3 generic: resolve a ref value (character, NULL, github_pull() or
# github_release() marker) into a concrete git ref inside `params`.
github_resolve_ref <- function(x, params, ...) UseMethod("github_resolve_ref")

#' @export
github_resolve_ref.default <- function(x, params, ...) {
  params$ref <- x
  params
}

#' @export
github_resolve_ref.NULL <- function(x, params, ...)
{ params$ref <- "master" params } #' @export github_resolve_ref.github_pull <- function(x, params, ..., host, auth_token = github_pat()) { # GET /repos/:user/:repo/pulls/:number path <- file.path("repos", params$username, params$repo, "pulls", x) response <- tryCatch( github_GET(path, host = host, pat = auth_token), error = function(e) e ) ## Just because libcurl might download the error page... if (methods::is(response, "error") || is.null(response$head)) { stop("Cannot find GitHub pull request ", params$username, "/", params$repo, "#", x, "\n", response$message) } params$username <- response$head$user$login params$ref <- response$head$ref params } # Retrieve the ref for the latest release #' @export github_resolve_ref.github_release <- function(x, params, ..., host, auth_token = github_pat()) { # GET /repos/:user/:repo/releases path <- paste("repos", params$username, params$repo, "releases", sep = "/") response <- tryCatch( github_GET(path, host = host, pat = auth_token), error = function(e) e ) if (methods::is(response, "error") || !is.null(response$message)) { stop("Cannot find repo ", params$username, "/", params$repo, ".", "\n", response$message) } if (length(response) == 0L) stop("No releases found for repo ", params$username, "/", params$repo, ".") params$ref <- response[[1L]]$tag_name params } #' @export remote_package_name.github_remote <- function(remote, ..., use_local = TRUE, use_curl = !is_standalone() && pkg_installed("curl")) { # If the package name was explicitly specified, use that if (!is.null(remote$package)) { return(remote$package) } # Otherwise if the repo is an already installed package assume that. 
if (isTRUE(use_local)) { local_name <- suppressWarnings(utils::packageDescription(remote$repo, fields = "Package")) if (!is.na(local_name)) { return(local_name) } } # Otherwise lookup the package name from the remote DESCRIPTION file desc <- github_DESCRIPTION(username = remote$username, repo = remote$repo, subdir = remote$subdir, host = remote$host, ref = remote$ref, pat = remote$auth_token %||% github_pat(), use_curl = use_curl) if (is.null(desc)) { return(NA_character_) } tmp <- tempfile() writeChar(desc, tmp) on.exit(unlink(tmp)) read_dcf(tmp)$Package } #' @export remote_sha.github_remote <- function(remote, ..., use_curl = !is_standalone() && pkg_installed("curl")) { tryCatch( github_commit(username = remote$username, repo = remote$repo, host = remote$host, ref = remote$ref, pat = remote$auth_token %||% github_pat(), use_curl = use_curl), # 422 errors most often occur when a branch or PR has been deleted, so we # ignore the error in this case http_422 = function(e) NA_character_ ) } #' @export format.github_remote <- function(x, ...) { "GitHub" } remotes/R/deps.R0000644000176200001440000005012613621330472013214 0ustar liggesusers #' Find all dependencies of a CRAN or dev package. #' #' Find all the dependencies of a package and determine whether they are ahead #' or behind CRAN. A `print()` method identifies mismatches (if any) #' between local and CRAN versions of each dependent package; an #' `update()` method installs outdated or missing packages from CRAN. #' #' @param packages A character vector of package names. #' @param pkgdir path to a package directory, or to a package tarball. #' @param dependencies Which dependencies do you want to check? #' Can be a character vector (selecting from "Depends", "Imports", #' "LinkingTo", "Suggests", or "Enhances"), or a logical vector. #' #' `TRUE` is shorthand for "Depends", "Imports", "LinkingTo" and #' "Suggests". `NA` is shorthand for "Depends", "Imports" and "LinkingTo" #' and is the default. 
`FALSE` is shorthand for no dependencies (i.e. #' just check this package, not its dependencies). #' @param quiet If `TRUE`, suppress output. #' @param upgrade One of "default", "ask", "always", or "never". "default" #' respects the value of the `R_REMOTES_UPGRADE` environment variable if set, #' and falls back to "ask" if unset. "ask" prompts the user for which out of #' date packages to upgrade. For non-interactive sessions "ask" is equivalent #' to "always". `TRUE` and `FALSE` are also accepted and correspond to #' "always" and "never" respectively. #' @param repos A character vector giving repositories to use. #' @param type Type of package to `update`. #' #' @param object A `package_deps` object. #' @param ... Additional arguments passed to `install_packages`. #' @inheritParams install_github #' #' @return #' #' A `data.frame` with columns: #' #' \tabular{ll}{ #' `package` \tab The dependent package's name,\cr #' `installed` \tab The currently installed version,\cr #' `available` \tab The version available on CRAN,\cr #' `diff` \tab An integer denoting whether the locally installed version #' of the package is newer (1), the same (0) or older (-1) than the version #' currently available on CRAN.\cr #' } #' #' @export #' @examples #' \dontrun{ #' package_deps("devtools") #' # Use update to update any out-of-date dependencies #' update(package_deps("devtools")) #' } package_deps <- function(packages, dependencies = NA, repos = getOption("repos"), type = getOption("pkgType")) { repos <- fix_repositories(repos) cran <- available_packages(repos, type) deps <- find_deps(packages, available = cran, top_dep = dependencies) # Remove base packages inst <- utils::installed.packages() base <- unname(inst[inst[, "Priority"] %in% c("base", "recommended"), "Package"]) deps <- setdiff(deps, base) # get remote types remote <- structure(lapply(deps, package2remote, repos = repos, type = type), class = "remotes") inst_ver <- vapply(deps, local_sha, character(1)) cran_ver <- 
vapply(remote, function(x) remote_sha(x), character(1)) is_cran_remote <- vapply(remote, inherits, logical(1), "cran_remote") diff <- compare_versions(inst_ver, cran_ver, is_cran_remote) res <- structure( data.frame( package = deps, installed = inst_ver, available = cran_ver, diff = diff, is_cran = is_cran_remote, stringsAsFactors = FALSE ), class = c("package_deps", "data.frame") ) res$remote <- remote res } #' `local_package_deps` extracts dependencies from a #' local DESCRIPTION file. #' #' @export #' @rdname package_deps local_package_deps <- function(pkgdir = ".", dependencies = NA) { pkg <- load_pkg_description(pkgdir) dependencies <- tolower(standardise_dep(dependencies)) dependencies <- intersect(dependencies, names(pkg)) parsed <- lapply(pkg[tolower(dependencies)], parse_deps) unlist(lapply(parsed, `[[`, "name"), use.names = FALSE) } #' `dev_package_deps` lists the status of the dependencies #' of a local package. #' #' @export #' @rdname package_deps dev_package_deps <- function(pkgdir = ".", dependencies = NA, repos = getOption("repos"), type = getOption("pkgType")) { pkg <- load_pkg_description(pkgdir) repos <- c(repos, parse_additional_repositories(pkg)) deps <- local_package_deps(pkgdir = pkgdir, dependencies = dependencies) if (is_bioconductor(pkg)) { bioc_repos <- bioc_install_repos() missing_repos <- setdiff(names(bioc_repos), names(repos)) if (length(missing_repos) > 0) repos[missing_repos] <- bioc_repos[missing_repos] } combine_deps( package_deps(deps, repos = repos, type = type), remote_deps(pkg)) } combine_deps <- function(cran_deps, remote_deps) { # If there are no dependencies there will be no remote dependencies either, # so just return them (and don't force the remote_deps promise) if (nrow(cran_deps) == 0) { return(cran_deps) } # Only keep the remotes that are specified in the cran_deps or are NA remote_deps <- remote_deps[is.na(remote_deps$package) | remote_deps$package %in% cran_deps$package, ] # If there are remote deps remove the 
equivalent CRAN deps cran_deps <- cran_deps[!(cran_deps$package %in% remote_deps$package), ] rbind(remote_deps, cran_deps) } ## -2 = not installed, but available on CRAN ## -1 = installed, but out of date ## 0 = installed, most recent version ## 1 = installed, version ahead of CRAN ## 2 = package not on CRAN compare_versions <- function(inst, remote, is_cran) { stopifnot(length(inst) == length(remote) && length(inst) == length(is_cran)) compare_var <- function(i, c, cran) { if (!cran) { if (identical(i, c)) { return(CURRENT) } else { return(BEHIND) } } if (is.na(c)) return(UNAVAILABLE) # not on CRAN if (is.na(i)) return(UNINSTALLED) # not installed, but on CRAN i <- package_version(i) c <- package_version(c) if (i < c) { BEHIND # out of date } else if (i > c) { AHEAD # ahead of CRAN } else { CURRENT # most recent CRAN version } } vapply(seq_along(inst), function(i) compare_var(inst[[i]], remote[[i]], is_cran[[i]]), integer(1)) } has_dev_remotes <- function(pkg) { !is.null(pkg[["remotes"]]) } #' @export print.package_deps <- function(x, show_ok = FALSE, ...) 
{ class(x) <- "data.frame" x$remote <-lapply(x$remote, format) ahead <- x$diff > 0L behind <- x$diff < 0L same_ver <- x$diff == 0L x$diff <- NULL x[] <- lapply(x, format_str, width = 12) if (any(behind)) { cat("Needs update -----------------------------\n") print(x[behind, , drop = FALSE], row.names = FALSE, right = FALSE) } if (any(ahead)) { cat("Not on CRAN ----------------------------\n") print(x[ahead, , drop = FALSE], row.names = FALSE, right = FALSE) } if (show_ok && any(same_ver)) { cat("OK ---------------------------------------\n") print(x[same_ver, , drop = FALSE], row.names = FALSE, right = FALSE) } } ## -2 = not installed, but available on CRAN ## -1 = installed, but out of date ## 0 = installed, most recent version ## 1 = installed, version ahead of CRAN ## 2 = package not on CRAN UNINSTALLED <- -2L BEHIND <- -1L CURRENT <- 0L AHEAD <- 1L UNAVAILABLE <- 2L #' @export #' @rdname package_deps #' @importFrom stats update update.package_deps <- function(object, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { object <- upgradable_packages(object, upgrade, quiet) unavailable_on_cran <- object$diff == UNAVAILABLE & object$is_cran unknown_remotes <- (object$diff == UNAVAILABLE | object$diff == UNINSTALLED) & !object$is_cran if (any(unavailable_on_cran) && !quiet) { message("Skipping ", sum(unavailable_on_cran), " packages not available: ", paste(object$package[unavailable_on_cran], collapse = ", ")) } if (any(unknown_remotes)) { install_remotes(object$remote[unknown_remotes], dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) 
} ahead_of_cran <- object$diff == AHEAD & object$is_cran if (any(ahead_of_cran) && !quiet) { message("Skipping ", sum(ahead_of_cran), " packages ahead of CRAN: ", paste(object$package[ahead_of_cran], collapse = ", ")) } ahead_remotes <- object$diff == AHEAD & !object$is_cran if (any(ahead_remotes)) { install_remotes(object$remote[ahead_remotes], dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } behind <- is.na(object$installed) | object$diff < CURRENT if (any(object$is_cran & !unavailable_on_cran & behind)) { # get the first cran-like remote and use its repos and pkg_type r <- object$remote[object$is_cran & behind][[1]] install_packages(object$package[object$is_cran & behind], repos = r$repos, type = r$pkg_type, dependencies = dependencies, quiet = quiet, ...) } install_remotes(object$remote[!object$is_cran & behind], dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) invisible() } install_packages <- function(packages, repos = getOption("repos"), type = getOption("pkgType"), ..., dependencies = FALSE, quiet = NULL) { # We want to pass only args that exist in the downstream functions args_to_keep <- unique( names( c( formals(utils::install.packages), formals(utils::download.file) ) ) ) args <- list(...) 
args <- args[names(args) %in% args_to_keep] if (is.null(quiet)) quiet <- !identical(type, "source") message("Installing ", length(packages), " packages: ", paste(packages, collapse = ", ")) do.call( safe_install_packages, c(list( packages, repos = repos, type = type, dependencies = dependencies, quiet = quiet ), args ) ) } find_deps <- function(packages, available = available_packages(), top_dep = TRUE, rec_dep = NA, include_pkgs = TRUE) { if (length(packages) == 0 || identical(top_dep, FALSE)) return(character()) top_dep <- standardise_dep(top_dep) rec_dep <- standardise_dep(rec_dep) top <- tools::package_dependencies(packages, db = available, which = top_dep) top_flat <- unlist(top, use.names = FALSE) if (length(rec_dep) != 0 && length(top_flat) > 0) { rec <- tools::package_dependencies(top_flat, db = available, which = rec_dep, recursive = TRUE) rec_flat <- unlist(rec, use.names = FALSE) } else { rec_flat <- character() } # We need to put the recursive dependencies _before_ the top dependencies, to # ensure that any dependencies are installed before their parents are loaded. unique(c(if (include_pkgs) packages, rec_flat, top_flat)) } #' Standardise dependencies using the same logical as [install.packages] #' #' @param x The dependencies to standardise. #' A character vector (selecting from "Depends", "Imports", #' "LinkingTo", "Suggests", or "Enhances"), or a logical vector. #' #' `TRUE` is shorthand for "Depends", "Imports", "LinkingTo" and #' "Suggests". `NA` is shorthand for "Depends", "Imports" and "LinkingTo" #' and is the default. `FALSE` is shorthand for no dependencies. #' #' @seealso for #' additional information on what each dependency type means. 
#' @keywords internal #' @export standardise_dep <- function(x) { if (identical(x, NA)) { c("Depends", "Imports", "LinkingTo") } else if (isTRUE(x)) { c("Depends", "Imports", "LinkingTo", "Suggests") } else if (identical(x, FALSE)) { character(0) } else if (is.character(x)) { x } else { stop("Dependencies must be a boolean or a character vector", call. = FALSE) } } #' Update packages that are missing or out-of-date. #' #' Works similarly to [utils::install.packages()] but doesn't install packages #' that are already installed, and also upgrades out dated dependencies. #' #' @param packages Character vector of packages to update. #' @inheritParams install_github #' @seealso [package_deps()] to see which packages are out of date/ #' missing. #' @export #' @examples #' \dontrun{ #' update_packages("ggplot2") #' update_packages(c("plyr", "ggplot2")) #' } update_packages <- function(packages = TRUE, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { if (isTRUE(packages)) { packages <- utils::installed.packages()[, "Package"] } pkgs <- package_deps(packages, repos = repos, type = type) update(pkgs, dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) 
} has_additional_repositories <- function(pkg) { "additional_repositories" %in% names(pkg) } parse_additional_repositories <- function(pkg) { if (has_additional_repositories(pkg)) { strsplit(trim_ws(pkg[["additional_repositories"]]), "[,[:space:]]+")[[1]] } } fix_repositories <- function(repos) { if (length(repos) == 0) repos <- character() # Override any existing default values with the cloud mirror # Reason: A "@CRAN@" value would open a GUI for choosing a mirror repos[repos == "@CRAN@"] <- download_url("cloud.r-project.org") repos } parse_one_remote <- function(x, ...) { pieces <- strsplit(x, "::", fixed = TRUE)[[1]] if (length(pieces) == 1) { type <- "github" repo <- pieces } else if (length(pieces) == 2) { type <- pieces[1] repo <- pieces[2] } else { stop("Malformed remote specification '", x, "'", call. = FALSE) } tryCatch({ # We need to use `environment(sys.function())` instead of # `asNamespace("remotes")` because when used as a script in # install-github.R there is no remotes namespace. fun <- get(paste0(tolower(type), "_remote"), envir = environment(sys.function()), mode = "function", inherits = FALSE) res <- fun(repo, ...) }, error = function(e) stop("Unknown remote type: ", type, "\n ", conditionMessage(e), call. = FALSE) ) res } split_remotes <- function(x) { pkgs <- trim_ws(unlist(strsplit(x, ",[[:space:]]*"))) if (any((res <- grep("[[:space:]]+", pkgs)) != -1)) { stop("Missing commas separating Remotes: '", pkgs[res], "'", call. 
= FALSE) } pkgs } package_deps_new <- function(package = character(), installed = character(), available = character(), diff = logical(), is_cran = logical(), remote = list()) { res <- structure( data.frame(package = package, installed = installed, available = available, diff = diff, is_cran = is_cran, stringsAsFactors = FALSE), class = c("package_deps", "data.frame") ) res$remote = structure(remote, class = "remotes") res } remote_deps <- function(pkg) { if (!has_dev_remotes(pkg)) { return(package_deps_new()) } dev_packages <- split_remotes(pkg[["remotes"]]) remote <- lapply(dev_packages, parse_one_remote) package <- vapply(remote, function(x) remote_package_name(x), character(1), USE.NAMES = FALSE) installed <- vapply(package, function(x) local_sha(x), character(1), USE.NAMES = FALSE) available <- vapply(remote, function(x) remote_sha(x), character(1), USE.NAMES = FALSE) diff <- installed == available diff <- ifelse(!is.na(diff) & diff, CURRENT, BEHIND) diff[is.na(installed)] <- UNINSTALLED package_deps_new(package, installed, available, diff, is_cran = FALSE, remote) } # interactive is an argument to make testing easier. 
resolve_upgrade <- function(upgrade, is_interactive = interactive()) { if (isTRUE(upgrade)) { upgrade <- "always" } else if (identical(upgrade, FALSE)) { upgrade <- "never" } upgrade <- match.arg(upgrade[[1]], c("default", "ask", "always", "never")) if (identical(upgrade, "default")) upgrade <- Sys.getenv("R_REMOTES_UPGRADE", unset = "ask") if (!is_interactive && identical(upgrade, "ask")) { upgrade <- "always" } upgrade } upgradable_packages <- function(x, upgrade, quiet, is_interactive = interactive()) { uninstalled <- x$diff == UNINSTALLED behind <- x$diff == BEHIND switch(resolve_upgrade(upgrade, is_interactive = is_interactive), always = { return(msg_upgrades(x, quiet)) }, never = return(x[uninstalled, ]), ask = { if (!any(behind)) { return(x) } pkgs <- format_upgrades(x[behind, ]) choices <- pkgs if (length(choices) > 0) { choices <- c("All", "CRAN packages only", "None", choices) } res <- select_menu(choices, title = "These packages have more recent versions available.\nIt is recommended to update all of them.\nWhich would you like to update?") if ("None" %in% res || length(res) == 0) { return(x[uninstalled, ]) } if ("All" %in% res) { wch <- seq_len(NROW(x)) } else { if ("CRAN packages only" %in% res) { wch <- uninstalled | (behind & x$is_cran) } else { wch <- sort(c(which(uninstalled), which(behind)[pkgs %in% res])) } } msg_upgrades(x[wch, ], quiet) } ) } select_menu <- function(choices, title = NULL, msg = "Enter one or more numbers, or an empty line to skip updates:", width = getOption("width")) { if (!is.null(title)) { cat(title, "\n", sep = "") } nc <- length(choices) op <- paste0(format(seq_len(nc)), ": ", choices) fop <- format(op) cat("", fop, "", sep = "\n") repeat { cat(msg, "\n", sep = "") answer <- readLines(n = 1) answer <- strsplit(answer, "[ ,]+")[[1]] if (all(answer %in% seq_along(choices))) { return(choices[as.integer(answer)]) } } } msg_upgrades <- function(x, quiet) { if (isTRUE(quiet) || nrow(x) == 0) { return(invisible(x)) } 
cat(format_upgrades(x[x$diff <= BEHIND, ]), sep = "\n") invisible(x) } format_upgrades <- function(x) { if (nrow(x) == 0) { return(character(0)) } remote_type <- lapply(x$remote, format) # This call trims widths to 12 characters x[] <- lapply(x, format_str, width = 12) # This call aligns the columns x[] <- lapply(x, format, trim = FALSE, justify = "left") pkgs <- paste0(x$package, " (", x$installed, " -> ", x$available, ") ", "[", remote_type, "]") pkgs } remotes/R/decompress.R0000644000176200001440000000413613502707776014442 0ustar liggesusers# Decompress pkg, if needed source_pkg <- function(path, subdir = NULL) { if (!dir.exists(path)) { bundle <- path outdir <- tempfile(pattern = "remotes") dir.create(outdir) path <- decompress(path, outdir) } else { bundle <- NULL } pkg_path <- if (is.null(subdir)) path else file.path(path, subdir) # Check it's an R package if (!file.exists(file.path(pkg_path, "DESCRIPTION"))) { stop("Does not appear to be an R package (no DESCRIPTION)", call. = FALSE) } # Check configure is executable if present config_path <- file.path(pkg_path, "configure") if (file.exists(config_path)) { Sys.chmod(config_path, "777") } pkg_path } decompress <- function(src, target) { stopifnot(file.exists(src)) if (grepl("\\.zip$", src)) { my_unzip(src, target) outdir <- getrootdir(as.vector(utils::unzip(src, list = TRUE)$Name)) } else if (grepl("\\.(tar|tar\\.gz|tar\\.bz2|tgz|tbz)$", src)) { untar(src, exdir = target) outdir <- getrootdir(untar(src, list = TRUE)) } else { ext <- gsub("^[^.]*\\.", "", src) stop("Don't know how to decompress files with extension ", ext, call. 
= FALSE) } file.path(target, outdir) } # Returns everything before the last slash in a filename # getdir("path/to/file") returns "path/to" # getdir("path/to/dir/") returns "path/to/dir" getdir <- function(path) sub("/[^/]*$", "", path) # Given a list of files, returns the root (the topmost folder) # getrootdir(c("path/to/file", "path/to/other/thing")) returns "path/to" # It does not check that all paths have a common prefix. It fails for # empty input vector. It assumes that directories end with '/'. getrootdir <- function(file_list) { stopifnot(length(file_list) > 0) slashes <- nchar(gsub("[^/]", "", file_list)) if (min(slashes) == 0) return(".") getdir(file_list[which.min(slashes)]) } my_unzip <- function(src, target, unzip = getOption("unzip", "internal")) { if (unzip %in% c("internal", "")) { return(utils::unzip(src, exdir = target)) } args <- paste( "-oq", shQuote(src), "-d", shQuote(target) ) system_check(unzip, args) } remotes/NEWS.md0000644000176200001440000003037413621321207013032 0ustar liggesusers# remotes 2.1.1 ## Minor improvements and fixes * Installing mixed binary and source dependencies when the latest versions of some packages do not have binaries yet should now install dependencies in the correct order to prevent load failures (#296) * `github_error()` now also works when a GitHub (Enterprise) server does not return information about the rate limit (@dpprdan, #396, #413). * `install_gitlab` passes the `quiet` argument on to `gitlab_pat` (@michaelchirico, #437) * `remotes` is now resilient against installed packages that declare `RemoteType: standard` but do not include a `RemoteRepos` or `RemotePkgType` field. In such a case, the values for `getOption("repos")` and `getOption("pkgType")` will be used (respectively). * `install_gitlab()` now installs from repositories in subgroups and with dots in their name. `subdir` is now an explicit argument instead of implicit in `repo` (@robertdj, #259, #420). 
* `install()` now passes the ellipsis `...` to `install_deps()` (@Neil-Schneider, #411) * The tests have been updated to work with newer versions of callr and R 4.0 # remotes 2.1.0 ## New features * `install_*()` functions gain `build_manual` and `build_vignette` arguments that previously existed in devtools versions < 2.0 (#353). * The interactive menu has been modified to provide more clear instructions on the skipping behavior (#207) * Credentials are now passed via HTTP headers, to reduce exposure when requests fail (#391). ## Minor improvements and fixes * `download()` with the external curl download method now always uses `-L` to follow redirects. (#350) * `update_packages()` now has a more informative error message when the update fails (#223, #232) * `install_git()` now can take credentials from the global option `remotes.git_credentials` (#378). * `install_git()` now works with SHA references and external git (#389). * GitHub remotes that point to branches no longer fail when the branch is later deleted (#274). * Local remotes whose original location has been moved will no longer error when updating (#370). 
* `update_deps()` no longer sorts the dependencies alphabetically (#296, #301) * `github_resolve_ref()` now takes a `host` parameter (#284) * Remotes specific environment variables now accept 0 and 1 as valid values (#238) * remotes now uses locking by default when installing binary packages, which avoids issues when installing binaries that are already open in other R processes (#368) * `update_deps()` no longer fails if a local package no longer exists (#289) * `install_version()` now errors with a more informative message when `type` is not 'source' (#323) * Bioc `remote_sha()` now always returns a character result (#379) * Fix API call for private repositories in `install_gitlab` (@aornugent, #359, #363) * git submodules now work if the submodule file is empty (@muschellij2, #234) * git submodules now work if the R package is stored in a subfolder (@pommedeterresautee, #233) * `install_gitlab()` no longer adds the access token twice to the request (@aornugent, #363). * Bitbucket dependencies now actually use the `BITBUCKET_USER` and `BITBUCKET_PASSWORD` environment variables (@antoine-sachet, #347). * `parse_deps()` now ignores trailing whitespaces around comparison operators in DESCRIPTION fields (@LiNk-NY, #366) # remotes 2.0.4 * `update.package_dependencies()` now uses the pkg_type for the cran remote rather than a global type attribute, fixing errors when this global attribute is lost (#291, #304). * Credentials are no longer passed to dependencies, as this breaks dependencies which use different credentials or hosts. If you have relied on this behavior a more robust way to provide the credentials is using the appropriate environment variables, e.g. `GITHUB_PAT`, `BITBUCKET_USER` etc. (@antoine-sachet, #345). * The hash of bitbucket hosts is now correctly retrieved (@antoine-sachet, #344) * Fix parsing of Additional_Repositories which have a leading newline (@tmelliott, #251). 
# remotes 2.0.3 * The order of choices for `upgrade = "ask"` now puts the stable ones 'All', 'CRAN only', 'none' first, so they always have the same numbers (#287). * `update_submodules()` now works with empty .gitmodules files (@jsilve24, #329). * remotes now understands the "standard" remote type, as produced by packages installed from CRAN using `pak` (#309) * `install_dev()` now supports ref/pull format, e.g. `install_dev('shiny@v1.2-rc')` (@mkearney, #279). * Fix return type of `install_remote()` when there is a circular dependency (#225) * `remote_package_name.github_remote()` now works properly on Windows (#248) * `install_bioc()` repositories now updated for the Bioconductor 3.8 release. (#239) * `install_*` functions now set the `R_LIBS*` environment variables for child processes correctly on Windows (@HenrikBengtsson, #253) * `install_*` functions now support the `R_REMOTES_UPGRADE` environment variable, to set the default for the `upgrade` argument. See README for details (@kevinushey, #240). * `install_*` functions perform basic HTTP authentication using HTTP headers now. This fixes an issue with `install_bitbucket()` and private repos (#255). * `install_*` functions now respect the `download.file.method` option, if `download_file()` is used for HTTP. * `install_*` functions now use the _libcurl_ method, if the `download.file.method` option is not set to a different one, and libcurl is available. Before, the _wininet_ method was preferred on Windows. If you rely on the proxy configuration of _wininet_, then you might want to set the `download.file.method` option, or use another way to set up proxies, see `?download.file`. * Remotes without package names are now unconditionally installed (#246). 
* `install_github()` now includes a more informative error message when the status code is 404, asking the user to check that they have spelled the repo owner and repo correctly (included in the error message), and that they have the required permissions to access the repository. * `install_*` functions (via the underlying private `install` function) now set `RGL_USE_NULL="TRUE"` in order to avoid errors when running headless and installing any package using `rgl` (@jefferis, ##333) # remotes 2.0.2 * `install_deps()` now installs un-installed remotes packages even when `upgrade = "never"` (@ankane, #227) * `install_version()` now removes metadata added as a byproduct of using `install_url()` internally() (#224) * `install()` now avoids converting warnings to errors if `R_REMOTES_NO_ERRORS_FROM_WARNINGS` is unset and `_R_CHECK_FORCE_SUGGESTS_=false`. This avoids failures due to Suggested packages potentially being missing. * `install_bitbucket()` now works properly with packages in subdirectories (#220) * `install_deps()` now installs un-installed packages even when `upgrade = "never"` (#218) # remotes 2.0.1 * `install_github()` now bundles a GitHub PAT which is used on Travis to avoid hitting the rate limit too frequently. It also contains a more informative error message when the rate limit is hit on how to setup a GitHub personal access token. * The dialog when `upgrade = "ask"` now has a 'CRAN only' option, to update only CRAN packages. * No longer include project specific .Rprofile code in the temporary .Rprofile when `R_REMOTES_NO_ERRORS_FROM_WARNINGS=false` (the default). * `update.package_deps()` no longer prompts to install uninstalled dependencies, they are always installed (#203). * `available_packages()`, `available_packages_set()` and `available_packges_reset()` added to allow caching of the `available.packages()` database. # remotes 2.0.0 ## Breaking changes * `install_github()`'s previously deprecated `username` argument has been removed. 
(#142) * `install_deps()`'s `threads` argument has been removed, use the `Ncpus` argument instead (#153, #154) * `install_git()`'s `branch` argument has been renamed to `ref` and `branch` has been deprecated. ## New features * remotes now builds packages by default before installing them. This step uses the pkgbuild package, if available. If not, it calls `R CMD build` directly. * New `install_dev()` to install the development version of a CRAN package, based on the URL and BugReports fields in the DESCRIPTION file (#144). * `install_()*` functions now temporally put Rtools on the PATH when necessary, as long as the pkgbuild package is installed. * remotes can be forced to use only its internal code by setting the environment variable `R_REMOTES_STANDALONE` = "true". This is useful when installing optional dependencies of remotes on Windows, such as curl or git2r (#147) * When installing, remotes now errors on warnings, to catch cases where packages are only partially installed. This often happens on windows when the package dll is opened in another R process (#113). * `install_()` functions now pass arguments, including authentication information and upgrade down to dependencies (#53, #86, #87). * `install_()` functions allow the selection of a subset of packages to upgrade, in interactive mode, when `upgrade = "ask"`. * `install_git()` now supports passing credentials, when it is used with `git = "git2r"` (#106) * `install_()` functions now return the name of the package(s) which were installed (#55). * git submodules are now installed if they exist and a git client is available (#138, #133, #103, #82). * New `install_gitlab()` and `install_bioc()` functions, to install `gitlab` and `bioc` remote types. * remotes now uses the same SHA updating logic for remotes as devtools, including checking if the SHA of the remote has changed since the last installation. 
(#135) * `install_url()` can now install package binaries on windows (r-lib/devtools#1765) ## Minor improvements and fixes * `install_deps()` et al. now do not rewrite the `type` argument from `both` to `binary` to allow falling back to `source`. This fixes various installation failures. * remotes now looks up GitHub package names locally, if possible, and uses the GitHub REST API (if the curl package is available, and not in standalone mode). This makes the remote lookup about 10x faster when the remote package has not changed since the last install. * Using a GITHUB_PAT no longer prints diagnostic messages by default (r-lib/devtools#1752). * remotes now always uses https URLs for R versions that support them (@ankane, #139) * Do not include the BioCextra repository in versions after it was deprecated (R 3.5+, Bioc 3.6+). * `install_()` functions now download tarballs (.tar.gz) files rather than zip archives (.zip). This results in generally smaller files and avoids issues with script permissions being lost and strange behavior of some external unzip programs on Windows (#96). * Dependency parsing is now more robust to whitespace around the dependency specifications (#73). * `standardise_dep()` exported, for use in devtools. * `install_local()` now defaults to the current directory. * `install_bitbucket()` now correctly supports authentication, and the `subdir` argument. * `install_()` functions give a helpful warning when the package has long path names, on Windows. In this case building the package usually fails. (#84, #178). * `install_()` functions have now a more robust way of handling various tar programs on Windows (#172). * `install_()` functions now give a helpful warning on older R versions, on Windows, if `R.home()` contains a space character. Installation usually fails in this case. * GitHub API errors now give better error messages, including data about the API rate limits. 
# remotes 1.1.1 * Accept HTTPS, SSH, or various browser URLs in GitHub repo specification, @jennybc, #90, #109, #112 # remotes 1.1.0 * URL encode GitHub references, to allow installing from non-alphanumeric branch or tags, @krlmlr #38 * Better cooperation with proxy servers, and better download method selection on Windows, @cderv, #45, #46 * `install_deps()` supports tar balls, #47 * Allow training slash in GitHub repo specification, #54 * Work around on some Linux systems, where unzip is set to the empty string, @HenrikBengtsson, #57 * Check for circular dependencies while installing, #31 * Updated Bioconductor repo URLs for newer BioC versions # remotes 1.0.0 First public release. remotes/MD50000644000176200001440000001505313622037662012253 0ustar liggesusers07aa7db8d662845ec1941d8b462a0bfe *DESCRIPTION 89028777410872d6c2f812ad201795df *NAMESPACE 0deb3d5f097c7fbfc025028061426304 *NEWS.md c69cc47e03ce3e99cf507a1f468f9981 *R/bioc-standalone.R ce7ffc509f82357b35bd68b09c723d2b *R/bioc.R e99c497edc8defd1d3ec219a5a59a8a3 *R/circular.R 9d9ca0b83827953c41ff530e6cbd4787 *R/cran.R c0072fd0d4e37ef44c3dff3a6324d3a9 *R/dcf.R 4f8ffeae361f927119a1ac8c4112bb40 *R/decompress.R 8a1d6794eb74e65284476ed3af6427aa *R/deps.R 68574532404a2be400bcf9afa83e547d *R/devel.R 07771abe3cbc39c0bba13ea57d6211b1 *R/download.R 136cb1c0fb2c31d40979345dece0ed20 *R/git.R 476908e2f2927932e16cfeae5bb429b6 *R/github.R e9d6c249a208b6045aa3cf375c66fdc5 *R/install-bioc.R 19d7891ff89c4451edbbcc6f6d11f5ab *R/install-bitbucket.R da4419bab277686995fa0051d8d9d09f *R/install-cran.R b1968a0bd90a3ce171180882ebe46612 *R/install-dev.R e6cfafc1abbb4c007cbff0e7e7f355dd *R/install-git.R ecb8b621097b26cb8129694b8f33f404 *R/install-github.R 97d921f1cb0d95b3cbc52080795ac4c9 *R/install-gitlab.R e85b5582822675844ac327df81aebd7c *R/install-local.R d7790dde4400f77afef3ada8d453f9ce *R/install-remote.R 77a5d4068534194eef8000f8836da175 *R/install-svn.R 03c8a5d7c974368febadc5f97a7c80ee *R/install-url.R 
b90c262644d6026eab41474149a1c80e *R/install-version.R e400fb7e59bfbfd3d4ee017b62c14770 *R/install.R 19285a28eff3b86975f00799a3636a92 *R/json.R 2debb4956d79a8a2c752f8cdfc3957bd *R/package-deps.R 9ce31d85e99a412fbd6846818e89e604 *R/package.R 83ee6b8a9db8266a77d8af8b381c3179 *R/parse-git.R 670d696d1b64b89bb060fff6fb86e023 *R/submodule.R 80225ac0c70ae343816672fccce651e1 *R/system.R 21a4945721f9cadea520248e8b3a4c5f *R/utils.R 29baf33645ecea703afb8e31d4b2bc16 *README.md a489b05676b7c5add478228f1fe856e0 *build/vignette.rds 458664f52739e0f183ec71ab2e2fb363 *inst/doc/dependencies.Rmd 98ac30de48a9c62454214c4a29511150 *inst/doc/dependencies.html eddf8d2f7c28ca9218b240714f5e26c0 *inst/install-github.R 835a5c1138d08ebfdacbcf9546050646 *inst/install-github.Rin b8a5cc7f7a31075f9eab27198eaa80c6 *man/available_packages.Rd 60ec577226781160148f910487eaa456 *man/bioc_install_repos.Rd 4f910c5c0dcd8d127bb23977310b0287 *man/download.Rd 06e1c74c2f341321fd45678ad86410fb *man/download_version.Rd 2ec27d9015458bf65549d7f42f60a7fa *man/git_credentials.Rd 30ed3d4d9f74b396713fa924eb84aa54 *man/github_refs.Rd 3b6df194bccf02a5bfe5fada00ec4698 *man/gitlab_pat.Rd 01ea595d5ce37313fba9e661bc9baea6 *man/install_bioc.Rd a9082018714b01c32bef24fbd7ad6b5a *man/install_bitbucket.Rd 2d66d9b9f58176444751719185699380 *man/install_cran.Rd 9e4f668445ed24432ea78fd4a54b3604 *man/install_deps.Rd 01739b2f4c6d26fd5752d23d6924431e *man/install_dev.Rd 6f4a081dcd279d1bfb444570711500d7 *man/install_git.Rd 203b080dbef06de53035ac186593ccd2 *man/install_github.Rd 45507f04993a3c5e278c1e744578c3ba *man/install_gitlab.Rd 422bd25cdbcd9a2d85cba05b9a75bc67 *man/install_local.Rd b090a52707b9b2f1a74675bf5b7cae30 *man/install_svn.Rd 2ada7dcc706a776bdce58d7ad13781c9 *man/install_url.Rd a66379534fda3e430c843ba53d38ac0b *man/install_version.Rd 0a90e572c72cb469ec6edaf60daee804 *man/package_deps.Rd f02684918deaa642898daa03013f3519 *man/parse-git-repo.Rd 917a7a505b1c4afb81d2b0864cc94b93 *man/standardise_dep.Rd 
daa83bb62f62c099d36a03a74356ee92 *man/update_packages.Rd 9227416e75394195e4bec112cbabf0ec *tests/testthat.R 2bd668feb4c35832600369ffdefaafdb *tests/testthat/Biobase/DESCRIPTION 10f0406cb2932f8dd2f590a4a8ef914d *tests/testthat/MASS/DESCRIPTION 5eea495de275efecfcf2f7580bcac502 *tests/testthat/archives/foo.tar 358eb855f52aa4df03c10afa05128cf8 *tests/testthat/archives/foo.tar.bz2 388f01616a7de758cd9cd3424ae58489 *tests/testthat/archives/foo.tar.gz 358eb855f52aa4df03c10afa05128cf8 *tests/testthat/archives/foo.tbz 0287fa3248eceb208505b168c768f840 *tests/testthat/archives/foo.tgz 0f14e5f58cfd0a7693858d6f85b03ef7 *tests/testthat/archives/foo.zip 91bed546bea77f2a00f0ae88b3efcee2 *tests/testthat/archives/foo/DESCRIPTION b7dbe1170cee5a36267b3ce8c2b66cb2 *tests/testthat/archives/foo/R/foo.R d41d8cd98f00b204e9800998ecf8427e *tests/testthat/archives/foo/configure 74cf6c0f9b64cdb3ffd52b5789404b1c *tests/testthat/github-error-local.txt 40772d6bf83a55c074d69945b3ecf993 *tests/testthat/github-error-travis.txt 3db6b8e877bcd93b197269e3f774c1d3 *tests/testthat/helper.R bdaced33278228c95e9a292d02c8254c *tests/testthat/invalidpkg/DESCRIPTION 974048d625e58cb4dda8a71b44ab7e64 *tests/testthat/noremotes/DESCRIPTION 2b290b120d1010fb1352f73e3b242e1d *tests/testthat/submodule/DESCRIPTION 5c2e97156ee87d91b7add2df363c2318 *tests/testthat/submodule/NAMESPACE db433608622737ad580187e2e56a73e8 *tests/testthat/test-bioc.R f3e6696594b36f383a5fb95f0b08a8a6 *tests/testthat/test-cran.R 3c36f40435ba1b7fd6e0122c20eac356 *tests/testthat/test-dcf.R 6054aeb74cec6e9b620434f75ec2186e *tests/testthat/test-decompress.R 84272aa64f9f6b485864d28f0c71d3a9 *tests/testthat/test-deps.R aac0c955b38e1701cd3ae9ec5831166b *tests/testthat/test-devel.R 3d80b1856ba016832f0a516d179ccba1 *tests/testthat/test-download.R 6f369d8f22089c43c25d370e51482622 *tests/testthat/test-git.R 82dbcbd549698cf706e4be00019c8133 *tests/testthat/test-github.R 9c324725475f4c796def52037046daab *tests/testthat/test-install-bioc.R 
904d2256b1661807ae2d1ca2a7c39dd0 *tests/testthat/test-install-bitbucket.R df38e7b929577a63a4a218bc1803cb53 *tests/testthat/test-install-cran.R 03f5412295d4458c2b889134e7c5bbe5 *tests/testthat/test-install-deps.R 139ef5585c68aeabcbdb025560ecfbfb *tests/testthat/test-install-dev.R 6ea7aa45599a3eb33532e7ddc4140c10 *tests/testthat/test-install-git.R 8b1a43d390b4aaf912729ce01e7794ee *tests/testthat/test-install-github.R 5dabb90e2b5f8ccf9dc5362f95cf78d1 *tests/testthat/test-install-gitlab.R c12cdc311721bb7cf789a3dfb7ab36e3 *tests/testthat/test-install-local.R b542a680b50a9d0fa810e7897cc684bc *tests/testthat/test-install-remote.R 298c84902e410b25661acffc37284c10 *tests/testthat/test-install-svn.R 35986d33af1d226b821fa3a57842461a *tests/testthat/test-install-url.R caa8591df7f897ff309779eaa4b9c619 *tests/testthat/test-install-version.R 88cd68b70451b6a018fbb0dae2ef6ee2 *tests/testthat/test-install.R 152520be5f9cd91fd470816ad6f94e43 *tests/testthat/test-json.R e6fb13961ed17d6466cca8511b61d673 *tests/testthat/test-package-deps.R 94a273596db8b12b86865cd2aa52c042 *tests/testthat/test-package.R 39a7290761e56054ebba53e9af89d964 *tests/testthat/test-parse-git.R 166e639d08fd0cec40a6e9d398d7211b *tests/testthat/test-script.R 4cded1166cae02a3ec4181945e39ee54 *tests/testthat/test-submodule.R 08a41508665d958b10d4fdca49ed4d34 *tests/testthat/test-system.R 41f17dc6688bf87fb25911310d2272f9 *tests/testthat/test-utils.R 886d9a6cd17d7f84ae6360ddd8ca0d58 *tests/testthat/withremotes/DESCRIPTION 458664f52739e0f183ec71ab2e2fb363 *vignettes/dependencies.Rmd remotes/inst/0000755000176200001440000000000013621331470012705 5ustar liggesusersremotes/inst/install-github.Rin0000644000176200001440000000131013502702607016302 0ustar liggesusers# Autogenerated from contents in the package's R directory, do not edit! # Run make to update. function(...) 
{ ## This is the code of the package, put in here by brew <% lapply( sort(list.files("R", full.names = TRUE)), function(x) { text <- c(paste0("# Contents of ", x), readLines(x)) cat(paste0(" ", text), sep = "\n") } ) %> ## Standalone mode, make sure that we restore the env var on exit old <- Sys.getenv("R_REMOTES_STANDALONE", NA_character_) Sys.setenv("R_REMOTES_STANDALONE" = "true") if (is.na(old)) { on.exit(Sys.unsetenv("R_REMOTES_STANDALONE"), add = TRUE) } else { on.exit(Sys.setenv("R_REMOTES_STANDALONE" = old), add = TRUE) } install_github(...) } remotes/inst/doc/0000755000176200001440000000000013621331470013452 5ustar liggesusersremotes/inst/doc/dependencies.Rmd0000644000176200001440000000532113362420362016546 0ustar liggesusers--- title: "Dependency resolution for R package development" author: "Jim Hester, Hadley Wickham, Gábor Csárdi" date: "`r Sys.Date()`" output: rmarkdown::html_vignette: keep_md: true vignette: > %\VignetteIndexEntry{Dependency resolution for R package development} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- # Package remotes Remotes, just like devtools, supports package dependency installation for packages not yet in a standard package repository such as [CRAN](https://cran.r-project.org) or [Bioconductor](http://bioconductor.org). You can mark any regular dependency defined in the `Depends`, `Imports`, `Suggests` or `Enhances` fields as being installed from a remote location by adding the remote location to `Remotes` in your `DESCRIPTION` file. This will cause remotes to download and install them from the specified location, instead of CRAN. The remote dependencies specified in `Remotes` should be described in the following form. ``` Remotes: [type::], [type2::] ``` The `type` is an optional parameter. If the type is missing the default is to install from GitHub. Additional remote dependencies should be separated by commas, just like normal dependencies elsewhere in the `DESCRIPTION` file. 
### GitHub Because GitHub is the most commonly used unofficial package distribution in R, it's the default: ```yaml Remotes: r-lib/testthat ``` You can also specify a specific hash, tag, or pull request (using the same syntax as `install_github()` if you want a particular commit. Otherwise the latest commit on the master branch is used. ```yaml Remotes: r-lib/httr@v0.4, klutometis/roxygen#142, r-lib/testthat@c67018fa4970 ``` The special `@*release` syntax will install the latest release: ```yaml Remotes: r-lib/testthat@*release ``` A type of 'github' can be specified, but is not required ```yaml Remotes: github::tidyverse/ggplot2 ``` ### Other sources All of the currently supported install sources are available, see the 'See Also' section in `?install_github` for a complete list. ```yaml # GitLab Remotes: gitlab::jimhester/covr # Git Remotes: git::git@bitbucket.org:dannavarro/lsr-package.git # Bitbucket Remotes: bitbucket::sulab/mygene.r@default, dannavarro/lsr-package # Bioconductor Remotes: bioc::3.3/SummarizedExperiment#117513, bioc::release/Biobase # SVN Remotes: svn::https://github.com/tidyverse/stringr # URL Remotes: url::https://github.com/tidyverse/stringr/archive/master.zip # Local Remotes: local::/pkgs/testthat ``` ### CRAN submission When you submit your package to CRAN, all of its dependencies must also be available on CRAN. For this reason, `devtools::release()` will warn you if you try to release a package with a `Remotes` field. remotes/inst/doc/dependencies.html0000644000176200001440000003260013621331470016767 0ustar liggesusers Dependency resolution for R package development

Dependency resolution for R package development

Jim Hester, Hadley Wickham, Gábor Csárdi

2020-02-13

Package remotes

Remotes, just like devtools, supports package dependency installation for packages not yet in a standard package repository such as CRAN or Bioconductor.

You can mark any regular dependency defined in the Depends, Imports, Suggests or Enhances fields as being installed from a remote location by adding the remote location to Remotes in your DESCRIPTION file. This will cause remotes to download and install them from the specified location, instead of CRAN.

The remote dependencies specified in Remotes should be described in the following form.

Remotes: [type::]<Repository>, [type2::]<Repository2>

The type is an optional parameter. If the type is missing the default is to install from GitHub. Additional remote dependencies should be separated by commas, just like normal dependencies elsewhere in the DESCRIPTION file.

GitHub

Because GitHub is the most commonly used unofficial package distribution in R, it’s the default:

You can also specify a specific hash, tag, or pull request (using the same syntax as install_github()) if you want a particular commit. Otherwise the latest commit on the master branch is used.

The special @*release syntax will install the latest release:

A type of ‘github’ can be specified, but is not required.

CRAN submission

When you submit your package to CRAN, all of its dependencies must also be available on CRAN. For this reason, devtools::release() will warn you if you try to release a package with a Remotes field.

remotes/inst/install-github.R0000644000176200001440000050401713621330565015771 0ustar liggesusers# Autogenerated from contents in the package's R directory, do not edit! # Run make to update. function(...) { ## This is the code of the package, put in here by brew # Contents of R/bioc-standalone.R #' Tools for Bioconductor versions and repositories #' #' \section{API:} #' #' ``` #' get_yaml_config(forget = FALSE) #' set_yaml_config(text) #' #' get_release_version(forget = FALSE) #' get_devel_version(forget = FALSE) #' #' get_version_map(forget = FALSE) #' get_matching_bioc_version(r_version = getRversion(), forget = FALSE) #' get_bioc_version(r_version = getRversion(), forget = FALSE) #' #' get_repos(bioc_version = "auto", forget = FALSE) #' ``` #' #' * `forget`: Whether to forget the cached version of the Bioconductor #' config YAML file and download it again. #' * `text`: character vector (linewise) or scalar, the contents of the #' `config.yaml` file, if obtained externally, to be used as a cached #' version in the future. #' * `r_version`: R version string, or `package_version` object. #' * `bioc_version`: Bioc version string or `package_version` object, #' or the string `"auto"` to use the one matching the current R version. #' #' `get_yaml_config()` returns the raw contents of the `config.yaml` file, #' linewise. It is typically not needed, except if one needs information #' that cannot be surfaces via the other API functions. #' #' `set_yaml_config()` can be used to _set_ the contents of the #' `config.yaml` file. This is useful, if one has already obtained it #' externally, but wants to use the obtained file with the rest of the #' bioc standalone code. #' #' `get_release_version()` returns the version of the current Bioconductor #' release. #' #' `get_devel_version()` returns the version of the current development #' version of Bioconductor. #' #' `get_version_map()` return the mapping between R versions and #' Bioconductor versions. 
#' Note that this is not a one to one mapping.
#' E.g. currently R `3.6.x` maps to both Bioc `3.9` (Bioc release) and
#' `3.10` (Bioc devel); and also Bioc `3.10` maps to both R `3.6.x` and
#' R `3.7.x` (current R-devel). It returns a data frame with three columns:
#' `bioc_version`, `r_version` and `bioc_status`. The first two columns
#' contain `package_version` objects, the third is a factor with levels:
#' `out-of-date`, `release`, `devel`, `future`.
#'
#' `get_matching_bioc_version()` returns the matching Bioc version for an
#' R version. If the R version matches to both a released and a devel
#' version, then the released version is chosen.
#'
#' `get_bioc_version()` returns the matching Bioc version for the
#' specified R version. It does observe the `R_BIOC_VERSION` environment
#' variable, which can be used to force a Bioconductor version. If this is
#' not set, it just calls `get_matching_bioc_version()`.
#'
#' `get_repos()` returns the Bioc repositories of the specified Bioc
#' version. It defaults to the Bioc version that matches the calling R
#' version. It returns a named character vector.
#'
#' \section{NEWS:}
#' * 2019-05-30 First version in remotes.
#'
#' @name bioconductor
#' @keywords internal
#' @noRd
NULL

bioconductor <- local({

  # -------------------------------------------------------------------
  # Configuration that does not change often

  config_url <- "https://bioconductor.org/config.yaml"

  # Historical R minor version -> Bioc version pairs. These are fixed
  # forever, so they never need a config.yaml lookup.
  builtin_map <- list(
    "2.1"  = package_version("1.6"),
    "2.2"  = package_version("1.7"),
    "2.3"  = package_version("1.8"),
    "2.4"  = package_version("1.9"),
    "2.5"  = package_version("2.0"),
    "2.6"  = package_version("2.1"),
    "2.7"  = package_version("2.2"),
    "2.8"  = package_version("2.3"),
    "2.9"  = package_version("2.4"),
    "2.10" = package_version("2.5"),
    "2.11" = package_version("2.6"),
    "2.12" = package_version("2.7"),
    "2.13" = package_version("2.8"),
    "2.14" = package_version("2.9"),
    "2.15" = package_version("2.11"),
    "3.0"  = package_version("2.13"),
    "3.1"  = package_version("3.0"),
    "3.2"  = package_version("3.2"),
    "3.3"  = package_version("3.4"),
    "3.4"  = package_version("3.6"),
    "3.5"  = package_version("3.8"),
    "3.6"  = package_version("3.10")
  )

  # -------------------------------------------------------------------
  # Cache (filled lazily, dropped when `forget = TRUE`)

  devel_version <- NULL
  release_version <- NULL
  version_map <- NULL
  yaml_config <- NULL

  # -------------------------------------------------------------------
  # API

  get_yaml_config <- function(forget = FALSE) {
    if (forget || is.null(yaml_config)) {
      new <- tryCatch(read_url(config_url), error = function(x) x)
      if (inherits(new, "error")) {
        # Fall back to plain HTTP if the HTTPS download failed.
        http_url <- sub("^https", "http", config_url)
        new <- tryCatch(read_url(http_url), error = function(x) x)
      }
      if (inherits(new, "error")) stop(new)
      yaml_config <<- new
    }
    yaml_config
  }

  set_yaml_config <- function(text) {
    if (length(text) == 1) text <- strsplit(text, "\n", fixed = TRUE)[[1]]
    yaml_config <<- text
  }

  get_release_version <- function(forget = FALSE) {
    if (forget || is.null(release_version)) {
      yaml <- get_yaml_config(forget)
      pattern <- "^release_version: \"(.*)\""
      release_version <<- package_version(
        sub(pattern, "\\1", grep(pattern, yaml, value = TRUE))
      )
    }
    release_version
  }

  get_devel_version <- function(forget = FALSE) {
    if (forget || is.null(devel_version)) {
      yaml <- get_yaml_config(forget)
      pattern <- "^devel_version: \"(.*)\""
      devel_version <<- package_version(
        sub(pattern, "\\1", grep(pattern, yaml, value = TRUE))
      )
    }
    devel_version
  }

  get_version_map <- function(forget = FALSE) {
    if (forget || is.null(version_map)) {
      txt <- get_yaml_config(forget)
      # Top-level (non-indented) lines delimit the YAML sections; the
      # r_ver_for_bioc_ver block is everything between its header and the
      # next top-level line.
      grps <- grep("^[^[:blank:]]", txt)
      start <- match(grep("r_ver_for_bioc_ver", txt), grps)
      map <- txt[seq(grps[start] + 1, grps[start + 1] - 1)]
      map <- trimws(gsub("\"", "", sub(" #.*", "", map)))
      pattern <- "(.*): (.*)"
      bioc <- package_version(sub(pattern, "\\1", map))
      r <- package_version(sub(pattern, "\\2", map))
      status <- rep("out-of-date", length(bioc))
      release <- get_release_version()
      devel <- get_devel_version()
      status[bioc == release] <- "release"
      status[bioc == devel] <- "devel"
      # append final version for 'devel' R
      bioc <- c(bioc, max(bioc))
      r <- c(r, package_version(paste(unlist(max(r)) + 0:1, collapse = ".")))
      status <- c(status, "future")
      version_map <<- rbind(
        .VERSION_MAP_SENTINEL,
        data.frame(
          bioc_version = bioc,
          r_version = r,
          bioc_status = factor(
            status,
            levels = c("out-of-date", "release", "devel", "future")
          )
        )
      )
    }
    version_map
  }

  get_matching_bioc_version <- function(r_version = getRversion(),
                                        forget = FALSE) {
    minor <- as.character(get_minor_r_version(r_version))
    if (minor %in% names(builtin_map)) return(builtin_map[[minor]])

    # Not in the builtin map, so we need to look this up in the YAML data.
    map <- get_version_map(forget = forget)
    mine <- match(package_version(minor), map$r_version)
    if (!is.na(mine)) return(map$bioc_version[mine])

    # If it is not even in the YAML, then it must be some very old
    # or very new version. If old, we fail. If new, we assume bioc-devel.
    if (package_version(minor) < "2.1") {
      stop("R version too old, cannot run Bioconductor")
    }
    get_devel_version()
  }

  get_bioc_version <- function(r_version = getRversion(), forget = FALSE) {
    # R_BIOC_VERSION, when set, forces a Bioconductor version.
    if (nzchar(v <- Sys.getenv("R_BIOC_VERSION", ""))) {
      return(package_version(v))
    }
    get_matching_bioc_version(r_version, forget = forget)
  }

  get_repos <- function(bioc_version = "auto", forget = FALSE) {
    if (identical(bioc_version, "auto")) {
      bioc_version <- get_bioc_version(getRversion(), forget)
    } else {
      bioc_version <- package_version(bioc_version)
    }
    mirror <- Sys.getenv("R_BIOC_MIRROR", "https://bioconductor.org")
    mirror <- getOption("BioC_mirror", mirror)
    repos <- c(
      BioCsoft = "{mirror}/packages/{bv}/bioc",
      BioCann = "{mirror}/packages/{bv}/data/annotation",
      BioCexp = "{mirror}/packages/{bv}/data/experiment",
      BioCworkflows =
        if (bioc_version >= "3.7") "{mirror}/packages/{bv}/workflows",
      BioCextra =
        if (bioc_version <= "3.5") "{mirror}/packages/{bv}/extra"
    )

    ## It seems that if a repo is not available yet for bioc-devel,
    ## they redirect to the bioc-release version, so we do not need to
    ## parse devel_repos from the config.yaml file

    sub("{mirror}", mirror, fixed = TRUE,
        sub("{bv}", bioc_version, repos, fixed = TRUE))
  }

  # -------------------------------------------------------------------
  # Internals

  read_url <- function(url) {
    tmp <- tempfile()
    on.exit(unlink(tmp), add = TRUE)
    suppressWarnings(download.file(url, tmp, quiet = TRUE))
    if (!file.exists(tmp) || file.info(tmp)$size == 0) {
      stop("Failed to download `", url, "`")
    }
    readLines(tmp, warn = FALSE)
  }

  # Zero-length version object, used for the empty sentinel data frame.
  .VERSION_SENTINEL <- local({
    version <- package_version(list())
    class(version) <- c("unknown_version", class(version))
    version
  })

  # Zero-row data frame that fixes the column types of the version map.
  .VERSION_MAP_SENTINEL <- data.frame(
    bioc_version = .VERSION_SENTINEL,
    r_version = .VERSION_SENTINEL,
    bioc_status = factor(
      factor(),
      levels = c("out-of-date", "release", "devel", "future")
    )
  )

  get_minor_r_version <- function(x) {
    package_version(x)[, 1:2]
  }

  # -------------------------------------------------------------------

  structure(
    list(
      .internal = environment(),
      get_yaml_config = get_yaml_config,
      set_yaml_config = set_yaml_config,
      get_release_version = get_release_version,
      get_devel_version = get_devel_version,
      get_version_map = get_version_map,
      get_matching_bioc_version = get_matching_bioc_version,
      get_bioc_version = get_bioc_version,
      get_repos = get_repos
    ),
    class = c("standalone_bioc", "standalone"))
})

# Contents of R/bioc.R

#' @export
#' @rdname bioc_install_repos
#' @keywords internal
#' @examples
#' bioc_version()
#' bioc_version("3.4")

bioc_version <- function(r_ver = getRversion()) {
  bioconductor$get_bioc_version(r_ver)
}

#' Tools for Bioconductor repositories
#'
#' `bioc_version()` returns the Bioconductor version for the current or the
#' specified R version.
#'
#' `bioc_install_repos()` deduces the URLs of the Bioconductor repositories.
#'
#' @details
#' Both functions observe the `R_BIOC_VERSION` environment variable, which
#' can be set to force a Bioconductor version. If this is set, then the
#' `r_ver` and `bioc_ver` arguments are ignored.
#'
#' `bioc_install_repos()` observes the `R_BIOC_MIRROR` environment variable
#' and also the `BioC_mirror` option, which can be set to the desired
#' Bioconductor mirror. The option takes precedence if both are set. Its
#' default value is `https://bioconductor.org`.
#'
#' @return
#' `bioc_version()` returns a Bioconductor version, a `package_version`
#' object.
#'
#' `bioc_install_repos()` returns a named character vector of the URLs of
#' the Bioconductor repositories, appropriate for the current or the
#' specified R version.
#'
#' @param r_ver R version to use. For `bioc_install_repos()` it is
#' ignored if `bioc_ver` is specified.
#' @param bioc_ver Bioconductor version to use. Defaults to the default one
#' corresponding to `r_ver`.
#'
#' @export
#' @keywords internal
#' @examples
#' bioc_install_repos()
bioc_install_repos <- function(r_ver = getRversion(),
                               bioc_ver = bioc_version(r_ver)) {
  bioconductor$get_repos(bioc_ver)
}

# Contents of R/circular.R

## An environment to hold which packages are being installed so packages
## with circular dependencies can be skipped the second time.
installing <- new.env(parent = emptyenv())

# TRUE when no installation is in progress yet (i.e. we are the root call).
is_root_install <- function() is.null(installing$packages)

# Clear the in-progress set once the root installation finishes.
exit_from_root_install <- function() installing$packages <- NULL

# Returns TRUE if the package in `pkgdir` is already being installed
# (optionally messaging about the skip); otherwise records it as being
# installed and returns FALSE.
check_for_circular_dependencies <- function(pkgdir, quiet) {
  pkgdir <- normalizePath(pkgdir)
  pkg <- get_desc_field(file.path(pkgdir, "DESCRIPTION"), "Package")

  if (pkg %in% installing$packages) {
    if (!quiet) message("Skipping ", pkg, ", it is already being installed")
    return(TRUE)
  }

  installing$packages <- c(installing$packages, pkg)
  FALSE
}

# Contents of R/cran.R

# Cache of available.packages() databases, keyed on (repos, type).
cache <- new.env(parent = emptyenv())

#' @rdname available_packages
#' @export
available_packages_set <- function(repos, type, db) {
  # Serialize the (repos, type) pair to get a stable cache key.
  key <- rawToChar(serialize(list(repos, type), NULL, ascii = TRUE))
  if (is.null(cache[[key]])) {
    cache[[key]] <- db
  }
  cache[[key]]
}

#' @rdname available_packages
#' @export
available_packages_reset <- function() {
  rm(list = ls(envir = cache), envir = cache)
}

#' Simpler available.packages
#'
#' This is mostly equivalent to [utils::available.packages()] however it also
#' caches the full result. Additionally the cache can be assigned explicitly with
#' [available_packages_set()] and reset (cleared) with [available_packages_reset()].
#'
#' @inheritParams utils::available.packages
#' @keywords internal
#' @seealso [utils::available.packages()] for full documentation on the output format.
#' @export
available_packages <- function(repos = getOption("repos"),
                               type = getOption("pkgType")) {
  available_packages_set(
    repos, type,
    suppressWarnings(
      utils::available.packages(utils::contrib.url(repos, type), type = type)
    )
  )
}

# Contents of R/dcf.R

# Read a DCF file (e.g. a DESCRIPTION) into a named list, keeping the
# whitespace of every field intact.
read_dcf <- function(path) {
  fields <- colnames(read.dcf(path))
  as.list(read.dcf(path, keep.white = fields)[1, ])
}

# Write a named list back out as a single-record DCF file, keeping the
# whitespace of every field intact.
write_dcf <- function(path, desc) {
  write.dcf(
    rbind(unlist(desc)),
    file = path,
    keep.white = names(desc),
    indent = 0
  )
}

# Extract one field from a DCF file; NULL if the field is absent.
get_desc_field <- function(path, field) {
  read_dcf(path)[[field]]
}

# Contents of R/decompress.R

# Decompress pkg, if needed
source_pkg <- function(path, subdir = NULL) {
  if (!dir.exists(path)) {
    bundle <- path
    outdir <- tempfile(pattern = "remotes")
    dir.create(outdir)
    path <- decompress(path, outdir)
  } else {
    bundle <- NULL
  }

  pkg_path <- if (is.null(subdir)) path else file.path(path, subdir)

  # Check it's an R package
  if (!file.exists(file.path(pkg_path, "DESCRIPTION"))) {
    stop("Does not appear to be an R package (no DESCRIPTION)", call. = FALSE)
  }

  # Check configure is executable if present
  config_path <- file.path(pkg_path, "configure")
  if (file.exists(config_path)) {
    Sys.chmod(config_path, "777")
  }

  pkg_path
}

decompress <- function(src, target) {
  stopifnot(file.exists(src))

  if (grepl("\\.zip$", src)) {
    my_unzip(src, target)
    outdir <- getrootdir(as.vector(utils::unzip(src, list = TRUE)$Name))
  } else if (grepl("\\.(tar|tar\\.gz|tar\\.bz2|tgz|tbz)$", src)) {
    untar(src, exdir = target)
    outdir <- getrootdir(untar(src, list = TRUE))
  } else {
    ext <- gsub("^[^.]*\\.", "", src)
    stop("Don't know how to decompress files with extension ", ext,
         call. = FALSE)
  }

  file.path(target, outdir)
}

# Returns everything before the last slash in a filename
# getdir("path/to/file") returns "path/to"
# getdir("path/to/dir/") returns "path/to/dir"
getdir <- function(path) sub("/[^/]*$", "", path)

# Given a list of files, returns the root (the topmost folder)
# getrootdir(c("path/to/file", "path/to/other/thing")) returns "path/to"
# It does not check that all paths have a common prefix. It fails for
# empty input vector. It assumes that directories end with '/'.
getrootdir <- function(file_list) {
  stopifnot(length(file_list) > 0)
  slashes <- nchar(gsub("[^/]", "", file_list))
  if (min(slashes) == 0) return(".")
  getdir(file_list[which.min(slashes)])
}

# Unzip `src` into `target`, honoring the "unzip" option: the internal
# unzip is used unless an external program is configured.
my_unzip <- function(src, target, unzip = getOption("unzip", "internal")) {
  if (unzip %in% c("internal", "")) {
    return(utils::unzip(src, exdir = target))
  }

  args <- paste(
    "-oq", shQuote(src),
    "-d", shQuote(target)
  )

  system_check(unzip, args)
}

# Contents of R/deps.R

#' Find all dependencies of a CRAN or dev package.
#'
#' Find all the dependencies of a package and determine whether they are
#' ahead or behind CRAN. A `print()` method identifies mismatches (if any)
#' between local and CRAN versions of each dependent package; an
#' `update()` method installs outdated or missing packages from CRAN.
#'
#' @param packages A character vector of package names.
#' @param pkgdir path to a package directory, or to a package tarball.
#' @param dependencies Which dependencies do you want to check?
#'   Can be a character vector (selecting from "Depends", "Imports",
#'   "LinkingTo", "Suggests", or "Enhances"), or a logical vector.
#'   `TRUE` is shorthand for "Depends", "Imports", "LinkingTo" and
#'   "Suggests". `NA` is shorthand for "Depends", "Imports" and
#'   "LinkingTo" and is the default. `FALSE` is shorthand for no
#'   dependencies.
#' @param repos A character vector giving repositories to use.
#' @param type Type of package to `update`.
#'
#' @return A `data.frame` with columns `package` (the dependent package's
#'   name), `installed` (the currently installed version), `available`
#'   (the version available on CRAN) and `diff` (an integer denoting
#'   whether the locally installed version of the package is newer (1),
#'   the same (0) or older (-1) than the version currently available on
#'   CRAN).
#'
#' @export
#' @examples
#' \dontrun{
#' package_deps("devtools")
#' # Use update to update any out-of-date dependencies
#' update(package_deps("devtools"))
#' }
package_deps <- function(packages, dependencies = NA,
                         repos = getOption("repos"),
                         type = getOption("pkgType")) {
  repos <- fix_repositories(repos)
  cran <- available_packages(repos, type)

  deps <- find_deps(packages, available = cran, top_dep = dependencies)

  # Remove base packages
  inst <- utils::installed.packages()
  base <- unname(inst[inst[, "Priority"] %in% c("base", "recommended"),
                      "Package"])
  deps <- setdiff(deps, base)

  # get remote types
  remote <- structure(
    lapply(deps, package2remote, repos = repos, type = type),
    class = "remotes"
  )

  inst_ver <- vapply(deps, local_sha, character(1))
  cran_ver <- vapply(remote, function(x) remote_sha(x), character(1))
  is_cran_remote <- vapply(remote, inherits, logical(1), "cran_remote")

  diff <- compare_versions(inst_ver, cran_ver, is_cran_remote)

  res <- structure(
    data.frame(
      package = deps,
      installed = inst_ver,
      available = cran_ver,
      diff = diff,
      is_cran = is_cran_remote,
      stringsAsFactors = FALSE
    ),
    class = c("package_deps", "data.frame")
  )

  res$remote <- remote

  res
}

#' `local_package_deps` extracts dependencies from a
#' local DESCRIPTION file.
#'
#' @export
#' @rdname package_deps
local_package_deps <- function(pkgdir = ".", dependencies = NA) {
  pkg <- load_pkg_description(pkgdir)

  dependencies <- tolower(standardise_dep(dependencies))
  dependencies <- intersect(dependencies, names(pkg))

  parsed <- lapply(pkg[tolower(dependencies)], parse_deps)
  unlist(lapply(parsed, `[[`, "name"), use.names = FALSE)
}

#' `dev_package_deps` lists the status of the dependencies
#' of a local package.
#'
#' @export
#' @rdname package_deps
dev_package_deps <- function(pkgdir = ".", dependencies = NA,
                             repos = getOption("repos"),
                             type = getOption("pkgType")) {
  pkg <- load_pkg_description(pkgdir)
  repos <- c(repos, parse_additional_repositories(pkg))

  deps <- local_package_deps(pkgdir = pkgdir, dependencies = dependencies)

  # Bioconductor packages need the Bioc repositories added as well.
  if (is_bioconductor(pkg)) {
    bioc_repos <- bioc_install_repos()

    missing_repos <- setdiff(names(bioc_repos), names(repos))

    if (length(missing_repos) > 0)
      repos[missing_repos] <- bioc_repos[missing_repos]
  }

  combine_deps(
    package_deps(deps, repos = repos, type = type),
    remote_deps(pkg))
}

combine_deps <- function(cran_deps, remote_deps) {
  # If there are no dependencies there will be no remote dependencies
  # either, so just return them (and don't force the remote_deps promise)
  if (nrow(cran_deps) == 0) {
    return(cran_deps)
  }

  # Only keep the remotes that are specified in the cran_deps or are NA
  remote_deps <- remote_deps[is.na(remote_deps$package) |
    remote_deps$package %in% cran_deps$package, ]

  # If there are remote deps remove the equivalent CRAN deps
  cran_deps <- cran_deps[!(cran_deps$package %in% remote_deps$package), ]

  rbind(remote_deps, cran_deps)
}

## -2 = not installed, but available on CRAN
## -1 = installed, but out of date
## 0
= installed, most recent version ## 1 = installed, version ahead of CRAN ## 2 = package not on CRAN compare_versions <- function(inst, remote, is_cran) { stopifnot(length(inst) == length(remote) && length(inst) == length(is_cran)) compare_var <- function(i, c, cran) { if (!cran) { if (identical(i, c)) { return(CURRENT) } else { return(BEHIND) } } if (is.na(c)) return(UNAVAILABLE) # not on CRAN if (is.na(i)) return(UNINSTALLED) # not installed, but on CRAN i <- package_version(i) c <- package_version(c) if (i < c) { BEHIND # out of date } else if (i > c) { AHEAD # ahead of CRAN } else { CURRENT # most recent CRAN version } } vapply(seq_along(inst), function(i) compare_var(inst[[i]], remote[[i]], is_cran[[i]]), integer(1)) } has_dev_remotes <- function(pkg) { !is.null(pkg[["remotes"]]) } #' @export print.package_deps <- function(x, show_ok = FALSE, ...) { class(x) <- "data.frame" x$remote <-lapply(x$remote, format) ahead <- x$diff > 0L behind <- x$diff < 0L same_ver <- x$diff == 0L x$diff <- NULL x[] <- lapply(x, format_str, width = 12) if (any(behind)) { cat("Needs update -----------------------------\n") print(x[behind, , drop = FALSE], row.names = FALSE, right = FALSE) } if (any(ahead)) { cat("Not on CRAN ----------------------------\n") print(x[ahead, , drop = FALSE], row.names = FALSE, right = FALSE) } if (show_ok && any(same_ver)) { cat("OK ---------------------------------------\n") print(x[same_ver, , drop = FALSE], row.names = FALSE, right = FALSE) } } ## -2 = not installed, but available on CRAN ## -1 = installed, but out of date ## 0 = installed, most recent version ## 1 = installed, version ahead of CRAN ## 2 = package not on CRAN UNINSTALLED <- -2L BEHIND <- -1L CURRENT <- 0L AHEAD <- 1L UNAVAILABLE <- 2L #' @export #' @rdname package_deps #' @importFrom stats update update.package_deps <- function(object, dependencies = NA, upgrade = c("default", "ask", "always", "never"), force = FALSE, quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", 
"--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, repos = getOption("repos"), type = getOption("pkgType"), ...) { object <- upgradable_packages(object, upgrade, quiet) unavailable_on_cran <- object$diff == UNAVAILABLE & object$is_cran unknown_remotes <- (object$diff == UNAVAILABLE | object$diff == UNINSTALLED) & !object$is_cran if (any(unavailable_on_cran) && !quiet) { message("Skipping ", sum(unavailable_on_cran), " packages not available: ", paste(object$package[unavailable_on_cran], collapse = ", ")) } if (any(unknown_remotes)) { install_remotes(object$remote[unknown_remotes], dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } ahead_of_cran <- object$diff == AHEAD & object$is_cran if (any(ahead_of_cran) && !quiet) { message("Skipping ", sum(ahead_of_cran), " packages ahead of CRAN: ", paste(object$package[ahead_of_cran], collapse = ", ")) } ahead_remotes <- object$diff == AHEAD & !object$is_cran if (any(ahead_remotes)) { install_remotes(object$remote[ahead_remotes], dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) } behind <- is.na(object$installed) | object$diff < CURRENT if (any(object$is_cran & !unavailable_on_cran & behind)) { # get the first cran-like remote and use its repos and pkg_type r <- object$remote[object$is_cran & behind][[1]] install_packages(object$package[object$is_cran & behind], repos = r$repos, type = r$pkg_type, dependencies = dependencies, quiet = quiet, ...) 
} install_remotes(object$remote[!object$is_cran & behind], dependencies = dependencies, upgrade = upgrade, force = force, quiet = quiet, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, repos = repos, type = type, ...) invisible() } install_packages <- function(packages, repos = getOption("repos"), type = getOption("pkgType"), ..., dependencies = FALSE, quiet = NULL) { # We want to pass only args that exist in the downstream functions args_to_keep <- unique( names( c( formals(utils::install.packages), formals(utils::download.file) ) ) ) args <- list(...) args <- args[names(args) %in% args_to_keep] if (is.null(quiet)) quiet <- !identical(type, "source") message("Installing ", length(packages), " packages: ", paste(packages, collapse = ", ")) do.call( safe_install_packages, c(list( packages, repos = repos, type = type, dependencies = dependencies, quiet = quiet ), args ) ) } find_deps <- function(packages, available = available_packages(), top_dep = TRUE, rec_dep = NA, include_pkgs = TRUE) { if (length(packages) == 0 || identical(top_dep, FALSE)) return(character()) top_dep <- standardise_dep(top_dep) rec_dep <- standardise_dep(rec_dep) top <- tools::package_dependencies(packages, db = available, which = top_dep) top_flat <- unlist(top, use.names = FALSE) if (length(rec_dep) != 0 && length(top_flat) > 0) { rec <- tools::package_dependencies(top_flat, db = available, which = rec_dep, recursive = TRUE) rec_flat <- unlist(rec, use.names = FALSE) } else { rec_flat <- character() } # We need to put the recursive dependencies _before_ the top dependencies, to # ensure that any dependencies are installed before their parents are loaded. unique(c(if (include_pkgs) packages, rec_flat, top_flat)) } #' Standardise dependencies using the same logical as [install.packages] #' #' @param x The dependencies to standardise. 
#'   A character vector (selecting from "Depends", "Imports",
#'   "LinkingTo", "Suggests", or "Enhances"), or a logical vector.
#'
#'   `TRUE` is shorthand for "Depends", "Imports", "LinkingTo" and
#'   "Suggests". `NA` is shorthand for "Depends", "Imports" and "LinkingTo"
#'   and is the default. `FALSE` is shorthand for no dependencies.
#'
#' @seealso <https://r-pkgs.org/description.html> for
#' additional information on what each dependency type means.
#' @keywords internal
#' @export
standardise_dep <- function(x) {
  if (identical(x, NA)) {
    c("Depends", "Imports", "LinkingTo")
  } else if (isTRUE(x)) {
    c("Depends", "Imports", "LinkingTo", "Suggests")
  } else if (identical(x, FALSE)) {
    character(0)
  } else if (is.character(x)) {
    x
  } else {
    stop("Dependencies must be a boolean or a character vector", call. = FALSE)
  }
}

#' Update packages that are missing or out-of-date.
#'
#' Works similarly to [utils::install.packages()] but doesn't install packages
#' that are already installed, and also upgrades out dated dependencies.
#'
#' @param packages Character vector of packages to update.
#' @inheritParams install_github
#' @seealso [package_deps()] to see which packages are out of date/
#' missing.
#' @export
#' @examples
#' \dontrun{
#' update_packages("ggplot2")
#' update_packages(c("plyr", "ggplot2"))
#' }
update_packages <- function(packages = TRUE,
                            dependencies = NA,
                            upgrade = c("default", "ask", "always", "never"),
                            force = FALSE,
                            quiet = FALSE,
                            build = TRUE,
                            build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                            build_manual = FALSE,
                            build_vignettes = FALSE,
                            repos = getOption("repos"),
                            type = getOption("pkgType"),
                            ...) {

  if (isTRUE(packages)) {
    # TRUE means "update everything that is installed".
    packages <- utils::installed.packages()[, "Package"]
  }

  pkgs <- package_deps(packages, repos = repos, type = type)
  update(pkgs,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Does the (lower-cased) DESCRIPTION have an Additional_repositories field?
has_additional_repositories <- function(pkg) {
  "additional_repositories" %in% names(pkg)
}

# Split the Additional_repositories field into a character vector of URLs;
# returns NULL when the field is absent.
parse_additional_repositories <- function(pkg) {
  if (has_additional_repositories(pkg)) {
    strsplit(trim_ws(pkg[["additional_repositories"]]), "[,[:space:]]+")[[1]]
  }
}

fix_repositories <- function(repos) {
  if (length(repos) == 0) {
    repos <- character()
  }

  # Override any existing default values with the cloud mirror
  # Reason: A "@CRAN@" value would open a GUI for choosing a mirror
  repos[repos == "@CRAN@"] <- download_url("cloud.r-project.org")
  repos
}

# Parse a single remote specification, e.g. "github::r-lib/remotes", and
# dispatch to the matching `<type>_remote()` constructor.
parse_one_remote <- function(x, ...) {
  pieces <- strsplit(x, "::", fixed = TRUE)[[1]]

  if (length(pieces) == 1) {
    # No explicit type prefix: default to GitHub.
    type <- "github"
    repo <- pieces
  } else if (length(pieces) == 2) {
    type <- pieces[1]
    repo <- pieces[2]
  } else {
    stop("Malformed remote specification '", x, "'", call. = FALSE)
  }

  tryCatch({
    # We need to use `environment(sys.function())` instead of
    # `asNamespace("remotes")` because when used as a script in
    # install-github.R there is no remotes namespace.
    fun <- get(
      paste0(tolower(type), "_remote"),
      envir = environment(sys.function()),
      mode = "function",
      inherits = FALSE
    )
    res <- fun(repo, ...)
  },
  error = function(e) {
    stop("Unknown remote type: ", type, "\n ", conditionMessage(e),
      call. = FALSE)
  })
  res
}

# Split a comma-separated Remotes field into individual specifications.
# Whitespace left inside a spec means a separating comma was forgotten.
split_remotes <- function(x) {
  pkgs <- trim_ws(unlist(strsplit(x, ",[[:space:]]*")))

  # grep() returns matching *indices*, so a non-empty result means at least
  # one spec still contains whitespace. (The original compared the indices
  # against -1, a value grep() can never return; it worked only because
  # any(integer(0) != -1) happens to be FALSE.)
  if (length(res <- grep("[[:space:]]+", pkgs)) > 0) {
    stop("Missing commas separating Remotes: '", pkgs[res], "'", call. = FALSE)
  }

  pkgs
}

# Construct a (possibly empty) `package_deps` data frame from its columns.
package_deps_new <- function(package = character(), installed = character(),
                             available = character(), diff = logical(),
                             is_cran = logical(), remote = list()) {

  res <- structure(
    data.frame(
      package = package,
      installed = installed,
      available = available,
      diff = diff,
      is_cran = is_cran,
      stringsAsFactors = FALSE
    ),
    class = c("package_deps", "data.frame")
  )

  # `<-` instead of the original's `=` assignment; attached after
  # construction so data.frame() does not coerce the list column.
  res$remote <- structure(remote, class = "remotes")
  res
}

# Build the package_deps rows for the DESCRIPTION's dev Remotes field.
remote_deps <- function(pkg) {
  if (!has_dev_remotes(pkg)) {
    return(package_deps_new())
  }

  dev_packages <- split_remotes(pkg[["remotes"]])
  remote <- lapply(dev_packages, parse_one_remote)

  package <- vapply(remote, function(x) remote_package_name(x),
    character(1), USE.NAMES = FALSE)
  installed <- vapply(package, function(x) local_sha(x),
    character(1), USE.NAMES = FALSE)
  available <- vapply(remote, function(x) remote_sha(x),
    character(1), USE.NAMES = FALSE)

  # Remotes are current only when the installed and available SHAs match.
  diff <- installed == available
  diff <- ifelse(!is.na(diff) & diff, CURRENT, BEHIND)
  diff[is.na(installed)] <- UNINSTALLED

  package_deps_new(package, installed, available, diff,
    is_cran = FALSE, remote)
}

# interactive is an argument to make testing easier.
# Normalise the `upgrade` argument to one of "ask", "always" or "never".
# `is_interactive` is a parameter purely so tests can force either mode.
resolve_upgrade <- function(upgrade, is_interactive = interactive()) {
  # TRUE/FALSE are accepted as aliases for "always"/"never".
  if (isTRUE(upgrade)) {
    upgrade <- "always"
  } else if (identical(upgrade, FALSE)) {
    upgrade <- "never"
  }

  upgrade <- match.arg(upgrade[[1]], c("default", "ask", "always", "never"))

  # "default" defers to the R_REMOTES_UPGRADE env var, falling back to "ask".
  if (identical(upgrade, "default")) {
    upgrade <- Sys.getenv("R_REMOTES_UPGRADE", unset = "ask")
  }

  # There is nobody to ask in a non-interactive session.
  if (!is_interactive && identical(upgrade, "ask")) {
    upgrade <- "always"
  }

  upgrade
}

# Filter a package_deps object down to the packages that should actually be
# updated, prompting the user when the resolved policy is "ask".
upgradable_packages <- function(x, upgrade, quiet,
                                is_interactive = interactive()) {

  uninstalled <- x$diff == UNINSTALLED
  behind <- x$diff == BEHIND

  switch(
    resolve_upgrade(upgrade, is_interactive = is_interactive),

    always = {
      return(msg_upgrades(x, quiet))
    },

    never = return(x[uninstalled, ]),

    ask = {
      if (!any(behind)) {
        return(x)
      }

      pkgs <- format_upgrades(x[behind, ])

      choices <- pkgs
      if (length(choices) > 0) {
        choices <- c("All", "CRAN packages only", "None", choices)
      }

      res <- select_menu(choices, title = "These packages have more recent versions available.\nIt is recommended to update all of them.\nWhich would you like to update?")

      if ("None" %in% res || length(res) == 0) {
        return(x[uninstalled, ])
      }

      if ("All" %in% res) {
        wch <- seq_len(NROW(x))
      } else {
        if ("CRAN packages only" %in% res) {
          wch <- uninstalled | (behind & x$is_cran)
        } else {
          wch <- sort(c(which(uninstalled), which(behind)[pkgs %in% res]))
        }
      }

      msg_upgrades(x[wch, ], quiet)
    }
  )
}

# Minimal multi-select text menu: prints numbered choices and keeps asking
# until every entered number is valid; an empty line selects nothing.
select_menu <- function(choices, title = NULL,
                        msg = "Enter one or more numbers, or an empty line to skip updates:",
                        width = getOption("width")) {

  if (!is.null(title)) {
    cat(title, "\n", sep = "")
  }

  nc <- length(choices)
  op <- paste0(format(seq_len(nc)), ": ", choices)
  fop <- format(op)
  cat("", fop, "", sep = "\n")

  repeat {
    cat(msg, "\n", sep = "")
    answer <- readLines(n = 1)
    answer <- strsplit(answer, "[ ,]+")[[1]]
    if (all(answer %in% seq_along(choices))) {
      return(choices[as.integer(answer)])
    }
  }
}

# Print the packages that are going to be upgraded (unless quiet); returns
# `x` invisibly so it can be used in a pipeline.
msg_upgrades <- function(x, quiet) {
  if (isTRUE(quiet) || nrow(x) == 0) {
    return(invisible(x))
  }

  cat(format_upgrades(x[x$diff <= BEHIND, ]), sep = "\n")

  invisible(x)
}

# Render one "pkg (old -> new) [type]" line per row of `x`.
format_upgrades <- function(x) {
  if (nrow(x) == 0) {
    return(character(0))
  }

  remote_type <- lapply(x$remote, format)

  # This call trims widths to 12 characters
  x[] <- lapply(x, format_str, width = 12)

  # This call aligns the columns
  x[] <- lapply(x, format, trim = FALSE, justify = "left")

  pkgs <- paste0(x$package, " (", x$installed, " -> ", x$available, ") ",
    "[", remote_type, "]")
  pkgs
}

# Contents of R/devel.R

## The checking code looks for the objects in the package namespace, so
## defining dll here removes the following NOTE
## Registration problem:
##   Evaluating 'dll$foo' during check gives error
##   'object 'dll' not found':
##     .C(dll$foo, 0L)
## See https://github.com/wch/r-source/blob/d4e8fc9832f35f3c63f2201e7a35fbded5b5e14c/src/library/tools/R/QC.R##L1950-L1980
## Setting the class is needed to avoid a note about returning the wrong class.
## The local object is found first in the actual call, so current behavior is
## unchanged.
dll <- list(foo = structure(list(), class = "NativeSymbolInfo"))

# TRUE when a working C toolchain is available, FALSE otherwise.
has_devel <- function() {
  tryCatch(
    has_devel2(),
    error = function(e) FALSE
  )
}

## This is similar to devtools:::has_devel(), with some
## very minor differences.
has_devel2 <- function() {
  # Compile, load and call a trivial C function to prove the toolchain works.
  foo_path <- file.path(tempfile(fileext = ".c"))

  cat("void foo(int *bar) { *bar=1; }\n", file = foo_path)
  on.exit(unlink(foo_path))

  R(c("CMD", "SHLIB", basename(foo_path)), dirname(foo_path))
  dylib <- sub("\\.c$", .Platform$dynlib.ext, foo_path)
  on.exit(unlink(dylib), add = TRUE)

  dll <- dyn.load(dylib)
  on.exit(dyn.unload(dylib), add = TRUE)

  stopifnot(.C(dll$foo, 0L)[[1]] == 1L)
  TRUE
}

# Warn, with OS-specific guidance, when a package needs compilation but no
# compiler was found.
missing_devel_warning <- function(pkgdir) {
  pkgname <- tryCatch(
    get_desc_field(file.path(pkgdir, "DESCRIPTION"), "Package"),
    error = function(e) NULL
  ) %||% ""

  sys <- sys_type()

  warning(
    "Package ",
    pkgname,
    " has compiled code, but no suitable ",
    "compiler(s) were found. Installation will likely fail.\n ",
    if (sys == "windows") {
      c("Install Rtools (https://cran.r-project.org/bin/windows/Rtools/).",
        "Then use the pkgbuild package, or make sure that Rtools in the PATH.")
    },
    if (sys == "macos") "Install XCode and make sure it works.",
    if (sys == "linux") "Install compilers via your Linux package manager."
  )
}

# Run `R <args>` in `path` using a quiet, vanilla-ish environment.
R <- function(args, path = tempdir()) {
  r <- file.path(R.home("bin"), "R")

  args <- c(
    "--no-site-file",
    "--no-environ",
    "--no-save",
    "--no-restore",
    "--quiet",
    args
  )

  system_check(r, args, path = path)
}

# Contents of R/download.R

#' Download a file
#'
#' Uses either the curl package for R versions older than 3.2.0,
#' otherwise a wrapper around [download.file()].
#'
#' We respect the `download.file.method` setting of the user. If it is
#' not set, then see `download_method()` for choosing a method.
#'
#' Authentication can be supplied three ways:
#' * By setting `auth_token`. This will append an HTTP `Authorization`
#'   header: `Authorization: token {auth_token}`.
#' * By setting `basic_auth` to a list with elements `user` and `password`.
#'   This will append a proper `Authorization: Basic {encoded_password}`
#'   HTTP header.
#' * By specifying the proper `headers` directly.
#'
#' If both `auth_token` and `basic_auth` are specified, that's an error.
#' `auth_token` and `basic_auth` are _appended_ to `headers`, so they
#' take precedence over an `Authorization` header that is specified
#' directly in `headers`.
#'
#' @param path Path to download to. `dirname(path)` must exist.
#' @param url URL.
#' @param auth_token Token for token-based authentication or `NULL`.
#' @param basic_auth List with `user` and `password` for basic HTTP
#'   authentication, or `NULL`.
#' @param quiet Passed to [download.file()] or [curl::curl_download()].
#' @param headers Named character vector of HTTP headers to use.
#' @return `path`, if the download was successful.
#'
#' @keywords internal
#' @importFrom utils compareVersion

download <- function(path, url, auth_token = NULL, basic_auth = NULL,
                     quiet = TRUE, headers = NULL) {

  if (!is.null(basic_auth) && !is.null(auth_token)) {
    stop("Cannot use both Basic and Token authentication at the same time")
  }

  # Both auth mechanisms are expressed as an Authorization header, appended
  # to (and thus taking precedence over) any user-supplied headers.
  if (!is.null(basic_auth)) {
    userpass <- paste0(basic_auth$user, ":", basic_auth$password)
    auth <- paste("Basic", base64_encode(charToRaw(userpass)))
    headers <- c(headers, Authorization = auth)
  }

  if (!is.null(auth_token)) {
    headers <- c(headers, Authorization = paste("token", auth_token))
  }

  if (getRversion() < "3.2.0") {
    curl_download(url, path, quiet, headers)
  } else {
    base_download(url, path, quiet, headers)
  }

  path
}

# Dispatch to the download strategy that can carry HTTP headers for the
# selected download.file() method.
base_download <- function(url, path, quiet, headers) {
  method <- download_method()

  status <- if (method == "wget") {
    base_download_wget(url, path, quiet, headers)
  } else if (method == "curl") {
    base_download_curl(url, path, quiet, headers)
  } else if (getRversion() < "3.6.0") {
    # Older R cannot pass headers= to download.file().
    base_download_noheaders(url, path, quiet, headers, method)
  } else {
    base_download_headers(url, path, quiet, headers, method)
  }

  if (status != 0) {
    stop("Cannot download file from ", url, call. = FALSE)
  }

  path
}

# wget: headers are smuggled in via --header= in download.file.extra.
base_download_wget <- function(url, path, quiet, headers) {
  extra <- getOption("download.file.extra")

  if (length(headers)) {
    qh <- shQuote(paste0(names(headers), ": ", headers))
    extra <- c(extra, paste0("--header=", qh))
  }

  with_options(
    list(download.file.extra = extra),
    suppressWarnings(
      utils::download.file(
        url,
        path,
        method = "wget",
        quiet = quiet,
        mode = "wb",
        extra = extra
      )
    )
  )
}

# curl: headers are smuggled in via -H in download.file.extra.
base_download_curl <- function(url, path, quiet, headers) {
  extra <- getOption("download.file.extra")

  # always add `-L`, so that curl follows redirects. GitHub in particular uses
  # 302 redirects extensively, so without -L these requests fail.
  extra <- c(extra, "-L")

  if (length(headers)) {
    qh <- shQuote(paste0(names(headers), ": ", headers))
    extra <- c(extra, paste("-H", qh))
  }

  with_options(
    list(download.file.extra = extra),
    suppressWarnings(
      utils::download.file(
        url,
        path,
        method = "curl",
        quiet = quiet,
        mode = "wb",
        extra = extra
      )
    )
  )
}

# Pre-3.6.0 fallback: there is no headers= argument, so the headers are
# injected by temporarily rebinding utils:::makeUserAgent to append them to
# the User-Agent header block. The original binding is restored on exit.
base_download_noheaders <- function(url, path, quiet, headers, method) {
  if (length(headers)) {

    if (method == "wininet" && getRversion() < "3.6.0") {
      warning(paste(
        "R (< 3.6.0) cannot send HTTP headers with the `wininet` download method.",
        "This download will likely fail. Please choose a different download",
        "method, via the `download.file.method` option. The `libcurl` method is",
        "best, if available, and the `wget` and `curl` methods work as well,",
        "if the corresponding external tool is available. See `?download.file`"))
    }

    get("unlockBinding", baseenv())("makeUserAgent", asNamespace("utils"))
    orig <- get("makeUserAgent", envir = asNamespace("utils"))
    on.exit({
      assign("makeUserAgent", orig, envir = asNamespace("utils"))
      lockBinding("makeUserAgent", asNamespace("utils"))
    }, add = TRUE)

    ua <- orig(FALSE)
    flathead <- paste0(names(headers), ": ", headers, collapse = "\r\n")
    agent <- paste0(ua, "\r\n", flathead)

    assign(
      "makeUserAgent",
      envir = asNamespace("utils"),
      function(format = TRUE) {
        if (format) {
          paste0("User-Agent: ", agent, "\r\n")
        } else {
          agent
        }
      })
  }

  suppressWarnings(
    utils::download.file(
      url,
      path,
      method = method,
      quiet = quiet,
      mode = "wb"
    )
  )
}

# R >= 3.6.0: download.file() accepts headers directly.
base_download_headers <- function(url, path, quiet, headers, method) {
  suppressWarnings(
    utils::download.file(
      url,
      path,
      method = method,
      quiet = quiet,
      mode = "wb",
      headers = headers
    )
  )
}

has_curl <- function() isTRUE(unname(capabilities("libcurl")))

# Pick the download.file() method: user option first, then libcurl, then
# wininet on old Windows, otherwise let R choose.
download_method <- function() {

  user_option <- getOption("download.file.method")

  if (!is.null(user_option)) {
    ## The user wants what the user wants
    user_option
  } else if (has_curl()) {
    ## If we have libcurl, it is usually the best option
    "libcurl"
  } else if (compareVersion(get_r_version(), "3.3") == -1 &&
             os_type() == "windows") {
    ## Before 3.3 we select wininet on Windows
    "wininet"
  } else {
    ## Otherwise this is probably hopeless, but let R select, and
    ## try something
    "auto"
  }
}

# Download using the curl package (only path for R < 3.2.0).
curl_download <- function(url, path, quiet, headers) {

  if (!pkg_installed("curl")) {
    stop("The 'curl' package is required if R is older than 3.2.0")
  }

  handle <- curl::new_handle()
  if (!is.null(headers)) {
    curl::handle_setheaders(handle, .list = headers)
  }
  curl::curl_download(url, path, quiet = quiet, mode = "wb", handle = handle)
}

# Resolve "auto" to a concrete method name; anything else passes through.
true_download_method <- function(x) {
  if (identical(x, "auto")) {
    auto_download_method()
  } else {
    x
  }
}

auto_download_method <- function() {
  if (isTRUE(capabilities("libcurl"))) {
    "libcurl"
  } else if (isTRUE(capabilities("http/ftp"))) {
    "internal"
  } else if (nzchar(Sys.which("wget"))) {
    "wget"
  } else if (nzchar(Sys.which("curl"))) {
    "curl"
  } else {
    ""
  }
}

# Can the selected method be trusted to do HTTPS properly?
download_method_secure <- function() {
  method <- true_download_method(download_method())

  if (method %in% c("wininet", "libcurl", "wget", "curl")) {
    # known good methods
    TRUE
  } else if (identical(method, "internal")) {
    # only done before R 3.3
    if (utils::compareVersion(get_r_version(), "3.3") == -1) {
      # if internal then see if were using windows internal with inet2
      identical(Sys.info()[["sysname"]], "Windows") && utils::setInternet2(NA)
    } else {
      FALSE
    }
  } else {
    # method with unknown properties (e.g. "lynx") or unresolved auto
    FALSE
  }
}

# Contents of R/git.R

# Extract the commit hash from a git archive. Git archives include the SHA1
# hash as the comment field of the tarball pax extended header
# (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html)
# For GitHub archives this should be the first header after the default one
# (512 byte) header.
git_extract_sha1_tar <- function(bundle) {

  # open the bundle for reading
  # We use gzcon for everything because (from ?gzcon)
  # > Reading from a connection which does not supply a 'gzip' magic
  # > header is equivalent to reading from the original connection
  conn <- gzcon(file(bundle, open = "rb", raw = TRUE))
  on.exit(close(conn))

  # The default pax header is 512 bytes long and the first pax extended header
  # with the comment should be 51 bytes long
  # `52 comment=` (11 chars) + 40 byte SHA1 hash
  len <- 0x200 + 0x33
  res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len])

  if (grepl("^52 comment=", res)) {
    sub("52 comment=", "", res)
  } else {
    NULL
  }
}

# Run a git command (via the system shell); errors out on a non-zero exit
# status, otherwise returns the captured output.
git <- function(args, quiet = TRUE, path = ".") {
  full <- paste0(shQuote(check_git_path()), " ", paste(args, collapse = ""))

  if (!quiet) {
    message(full)
  }

  result <- in_dir(path, system(full, intern = TRUE, ignore.stderr = quiet))

  status <- attr(result, "status") %||% 0
  if (!identical(as.character(status), "0")) {
    stop("Command failed (", status, ")", call. = FALSE)
  }

  result
}

# Retrieve the current running path of the git binary.
# @param git_binary_name The name of the binary depending on the OS.
git_path <- function(git_binary_name = NULL) {
  # Use user supplied path
  if (!is.null(git_binary_name)) {
    if (!file.exists(git_binary_name)) {
      # BUG FIX: the original passed `.call = FALSE`, which stop() does not
      # recognise (the argument is `call.`), so "FALSE" was pasted into the
      # error message instead of suppressing the call.
      stop("Path ", git_binary_name, " does not exist", call. = FALSE)
    }
    return(git_binary_name)
  }

  # Look on path
  git_path <- Sys.which("git")[[1]]
  if (git_path != "") return(git_path)

  # On Windows, look in common locations
  if (os_type() == "windows") {
    look_in <- c(
      "C:/Program Files/Git/bin/git.exe",
      "C:/Program Files (x86)/Git/bin/git.exe"
    )
    found <- file.exists(look_in)
    if (any(found)) return(look_in[found][1])
  }

  NULL
}

# Like git_path(), but a missing git installation is an error.
check_git_path <- function(git_binary_name = NULL) {
  path <- git_path(git_binary_name)

  if (is.null(path)) {
    stop("Git does not seem to be installed on your system.", call. = FALSE)
  }

  path
}

# Contents of R/github.R

# Perform a GET request against the GitHub v3 API and parse the JSON
# response. Uses curl when available, otherwise download() to a tempfile.
github_GET <- function(path, ..., host = "api.github.com",
                       pat = github_pat(),
                       use_curl = !is_standalone() && pkg_installed("curl")) {

  url <- build_url(host, path)

  if (isTRUE(use_curl)) {
    h <- curl::new_handle()
    headers <- c(
      if (!is.null(pat)) {
        c("Authorization" = paste0("token ", pat))
      }
    )
    curl::handle_setheaders(h, .list = headers)
    res <- curl::curl_fetch_memory(url, handle = h)

    if (res$status_code >= 300) {
      stop(github_error(res))
    }
    json$parse(rawToChar(res$content))
  } else {
    tmp <- tempfile()
    # Clean up the download like github_commit() does; the original leaked
    # this tempfile.
    on.exit(unlink(tmp), add = TRUE)
    download(tmp, url, auth_token = pat)
    json$parse_file(tmp)
  }
}

# Resolve `ref` to a commit SHA via the GitHub API. `current_sha` enables a
# conditional (If-None-Match) request: a 304 response short-circuits and
# returns the cached value.
github_commit <- function(username, repo, ref = "master",
                          host = "api.github.com", pat = github_pat(),
                          use_curl = !is_standalone() && pkg_installed("curl"),
                          current_sha = NULL) {

  url <- build_url(host, "repos", username, repo, "commits",
    utils::URLencode(ref, reserved = TRUE))

  if (isTRUE(use_curl)) {
    h <- curl::new_handle()
    headers <- c(
      "Accept" = "application/vnd.github.v3.sha",
      if (!is.null(pat)) {
        c("Authorization" = paste0("token ", pat))
      }
    )

    if (!is.null(current_sha)) {
      headers <- c(headers, "If-None-Match" = paste0('"', current_sha, '"'))
    }
    curl::handle_setheaders(h, .list = headers)
    res <- curl::curl_fetch_memory(url, handle = h)

    if (res$status_code == 304) {
      return(current_sha)
    }
    if (res$status_code >= 300) {
      stop(github_error(res))
    }

    rawToChar(res$content)
  } else {
    tmp <- tempfile()
    on.exit(unlink(tmp), add = TRUE)
    download(tmp, url, auth_token = pat)
    get_json_sha(paste0(readLines(tmp, warn = FALSE), collapse = "\n"))
  }
}

#' Retrieve Github personal access token.
#'
#' A github personal access token
#' Looks in env var `GITHUB_PAT`
#'
#' @keywords internal
#' @noRd
github_pat <- function(quiet = TRUE) {
  pat <- Sys.getenv("GITHUB_PAT")
  if (nzchar(pat)) {
    if (!quiet) {
      message("Using github PAT from envvar GITHUB_PAT")
    }
    return(pat)
  }

  # On CI a bundled, rate-limit-only PAT is used so anonymous builds do not
  # exhaust the shared GitHub API quota.
  if (in_ci()) {
    pat <- paste0(
      "b2b7441d",
      "aeeb010b",
      "1df26f1f6",
      "0a7f1ed",
      "c485e443"
    )
    if (!quiet) {
      message("Using bundled GitHub PAT. Please add your own PAT to the env var `GITHUB_PAT`")
    }
    return(pat)
  }

  NULL
}

# TRUE when running on any CI system (the conventional CI env var is set).
in_ci <- function() {
  nzchar(Sys.getenv("CI"))
}

# TRUE when running specifically on Travis CI.
in_travis <- function() {
  identical(Sys.getenv("TRAVIS", "false"), "true")
}

# Fetch the raw DESCRIPTION of username/repo[/subdir] at `ref` from the
# GitHub contents API.
github_DESCRIPTION <- function(username, repo, subdir = NULL, ref = "master",
                               host = "api.github.com", ...,
                               use_curl = !is_standalone() && pkg_installed("curl"),
                               pat = github_pat()) {

  if (!is.null(subdir)) {
    subdir <- utils::URLencode(subdir)
  }

  url <- build_url(host, "repos", username, repo, "contents", subdir, "DESCRIPTION")
  url <- paste0(url, "?ref=", utils::URLencode(ref))

  if (isTRUE(use_curl)) {
    h <- curl::new_handle()
    headers <- c(
      "Accept" = "application/vnd.github.v3.raw",
      if (!is.null(pat)) {
        c("Authorization" = paste0("token ", pat))
      }
    )
    curl::handle_setheaders(h, .list = headers)
    res <- curl::curl_fetch_memory(url, handle = h)

    if (res$status_code >= 300) {
      stop(github_error(res))
    }
    rawToChar(res$content)
  } else {
    # The original called tempfile() twice here, leaking the first file.
    tmp <- tempfile()
    on.exit(unlink(tmp), add = TRUE)
    download(tmp, url, auth_token = pat)
    base64_decode(gsub("\\\\n", "", json$parse_file(tmp)$content))
  }
}

# Convert a failed curl response from the GitHub API into a classed error
# condition with rate-limit information and actionable guidance.
github_error <- function(res) {
  res_headers <- curl::parse_headers_list(res$headers)

  ratelimit_limit <- res_headers$`x-ratelimit-limit` %||% NA_character_
  ratelimit_remaining <- res_headers$`x-ratelimit-remaining` %||% NA_character_
  ratelimit_reset <- .POSIXct(res_headers$`x-ratelimit-reset` %||% NA_character_, tz = "UTC")

  error_details <- json$parse(rawToChar(res$content))$message

  guidance <- ""
  if (identical(as.integer(ratelimit_remaining), 0L)) {
    guidance <- sprintf(
      "To increase your GitHub API rate limit - Use `usethis::browse_github_pat()` to create a Personal Access Token. - %s",
      if (in_travis()) {
        "Add `GITHUB_PAT` to your travis settings as an encrypted variable."
      } else {
        "Use `usethis::edit_r_environ()` and add the token as `GITHUB_PAT`."
      }
    )
  } else if (identical(as.integer(res$status_code), 404L)) {
    # BUG FIX: the named capture groups were lost from this pattern (it read
    # "(?P[^/]+)", which is not valid PCRE); the code below reads the
    # `owner` and `repo` matches, which requires (?P<name>...) groups.
    repo_information <- re_match(res$url, "(repos)/(?P<owner>[^/]+)/(?P<repo>[^/]+)/")
    if (!is.na(repo_information$owner) && !is.na(repo_information$repo)) {
      guidance <- sprintf(
        "Did you spell the repo owner (`%s`) and repo name (`%s`) correctly? - If spelling is correct, check that you have the required permissions to access the repo.",
        repo_information$owner,
        repo_information$repo
      )
    } else {
      guidance <- "Did you spell the repo owner and repo name correctly? - If spelling is correct, check that you have the required permissions to access the repo."
    }
  }

  if (identical(as.integer(res$status_code), 404L)) {
    msg <- sprintf(
      "HTTP error %s. %s %s",
      res$status_code,
      error_details,
      guidance
    )
  } else if (!is.na(ratelimit_limit)) {
    msg <- sprintf(
      "HTTP error %s. %s Rate limit remaining: %s/%s Rate limit reset at: %s %s",
      res$status_code,
      error_details,
      ratelimit_remaining,
      ratelimit_limit,
      format(ratelimit_reset, usetz = TRUE),
      guidance
    )
  } else {
    msg <- sprintf(
      "HTTP error %s. %s",
      res$status_code,
      error_details
    )
  }

  status_type <- (as.integer(res$status_code) %/% 100) * 100

  structure(list(message = msg, call = NULL),
    class = c(paste0("http_", unique(c(res$status_code, status_type, "error"))),
      "error", "condition"))
}

#> Error: HTTP error 404.
#>   Not Found
#>
#>   Rate limit remaining: 4999
#>   Rate limit reset at: 2018-10-10 19:43:52 UTC

# Contents of R/install-bioc.R

#' Install a development package from the Bioconductor git repository
#'
#' This function requires `git` to be installed on your system in order to
#' be used.
#'
#' It is vectorised so you can install multiple packages with
#' a single command.
#'
#' This is intended as an aid for Bioconductor developers. If you want to
#' install the release version of a Bioconductor package one can use the
#' `BiocManager` package.
#' @inheritParams install_git
#' @param repo Repository address in the format
#' `[username:password@@][release/]repo[#commit]`. Valid values for
#' the release are \sQuote{devel},
#' \sQuote{release} (the default if none specified), or numeric release
#' numbers (e.g. \sQuote{3.3}).
#' @param mirror The Bioconductor git mirror to use
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams install_github
#' @export
#' @family package installation
#' @examples
#' \dontrun{
#' install_bioc("SummarizedExperiment")
#' install_bioc("release/SummarizedExperiment")
#' install_bioc("3.3/SummarizedExperiment")
#' install_bioc("SummarizedExperiment#abc123")
#' install_bioc("user:password@release/SummarizedExperiment")
#' install_bioc("user:password@devel/SummarizedExperiment")
#' install_bioc("user:password@SummarizedExperiment#abc123")
#'}
install_bioc <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")),
                         git = c("auto", "git2r", "external"),
                         dependencies = NA,
                         upgrade = c("default", "ask", "always", "never"),
                         force = FALSE,
                         quiet = FALSE,
                         build = TRUE,
                         build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                         build_manual = FALSE, build_vignettes = FALSE,
                         repos = getOption("repos"),
                         type = getOption("pkgType"),
                         ...) {

  remotes <- lapply(repo, bioc_remote, mirror = mirror, git = match.arg(git))

  install_remotes(remotes,
    dependencies = dependencies,
    upgrade = upgrade, force = force, quiet = quiet,
    build = build, build_opts = build_opts,
    build_manual = build_manual, build_vignettes = build_vignettes,
    repos = repos, type = type, ...)
}

# Construct a Bioconductor remote, dispatching to the git2r or external-git
# implementation according to `git` (auto-detects git2r availability).
bioc_remote <- function(repo,
                        mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")),
                        git = c("auto", "git2r", "external"),
                        ...) {

  git <- match.arg(git)
  if (git == "auto") {
    git <- if (!is_standalone() && pkg_installed("git2r")) "git2r" else "external"
  }

  list(git2r = bioc_git2r_remote, external = bioc_xgit_remote)[[git]](repo, mirror)
}

# Parse concise git repo specification: [username:password@][branch/]repo[#commit]
# Returns a named list with only the components that were present; errors on
# an unparseable spec or when both a release and a commit are given.
parse_bioc_repo <- function(path) {
  user_pass_rx <- "(?:([^:]+):([^:@]+)@)?"
  release_rx <- "(?:(devel|release|[0-9.]+)/)?"
  repo_rx <- "([^/@#]+)"
  commit_rx <- "(?:[#]([a-zA-Z0-9]+))?"
  # The final `(.*)` alternative catches anything the structured form rejects,
  # and is flagged as "invalid" below.
  bioc_rx <- sprintf("^(?:%s%s%s%s|(.*))$", user_pass_rx, release_rx, repo_rx, commit_rx)

  param_names <- c("username", "password", "release", "repo", "commit", "invalid")
  replace <- stats::setNames(sprintf("\\%d", seq_along(param_names)), param_names)
  params <- lapply(replace, function(r) gsub(bioc_rx, r, path, perl = TRUE))

  if (params$invalid != "")
    stop(sprintf("Invalid bioc repo: %s", path))

  params <- params[sapply(params, nchar) > 0]

  if (!is.null(params$release) && !is.null(params$commit)) {
    stop("release and commit should not both be specified")
  }

  params
}

bioc_git2r_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages"))) {
  meta <- parse_bioc_repo(repo)

  # Fixed: this previously passed `meta$sha`, but parse_bioc_repo() stores the
  # pinned commit in `meta$commit` and never sets `sha`, so a `#commit` spec
  # was silently ignored when computing the branch/ref to check out.
  branch <- bioconductor_branch(meta$release, meta$commit)

  if (!is.null(meta$username) && !is.null(meta$password)) {
    meta$credentials <- git2r::cred_user_pass(meta$username, meta$password)
  }

  remote("bioc_git2r",
    mirror = mirror,
    repo = meta$repo,
    release = meta$release %||% "release",
    sha = meta$commit,
    branch = branch,
    credentials = meta$credentials
  )
}

bioc_xgit_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages"))) {
  meta <- parse_bioc_repo(repo)

  # Fixed: same `meta$sha` -> `meta$commit` defect as bioc_git2r_remote().
  branch <- bioconductor_branch(meta$release, meta$commit)

  if (!is.null(meta$username) && !is.null(meta$password)) {
    meta$credentials <- git2r::cred_user_pass(meta$username, meta$password)
  }

  remote("bioc_xgit",
    mirror = mirror,
    repo = meta$repo,
    release = meta$release %||% "release",
    sha = meta$commit,
    branch = branch,
    credentials = meta$credentials
  )
}

#' @export
remote_download.bioc_git2r_remote <- function(x, quiet = FALSE) {
  url <- paste0(x$mirror, "/", x$repo)
  if (!quiet) {
    message("Downloading Bioconductor repo ", url)
  }

  bundle <- tempfile()
  git2r::clone(url, bundle, credentials = x$credentials, progress = FALSE)

  if (!is.null(x$branch)) {
    r <- git2r::repository(bundle)
    git2r::checkout(r, x$branch)
  }

  bundle
}

#' @export
remote_download.bioc_xgit_remote <- function(x, quiet = FALSE) {
  url <- paste0(x$mirror, "/", x$repo)
  if (!quiet) {
    message("Downloading Bioconductor repo ", url)
  }

  bundle <- tempfile()

  args <- c('clone', '--depth', '1', '--no-hardlinks')
  if (!is.null(x$branch)) {
    args <- c(args, "--branch", x$branch)
  }
  args <- c(args, x$args, url, bundle)

  git(paste0(args, collapse = " "), quiet = quiet)

  bundle
}

#' @export
remote_metadata.bioc_git2r_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  url <- paste0(x$mirror, "/", x$repo)

  if (!is.null(bundle)) {
    r <- git2r::repository(bundle)
    sha <- git_repo_sha1(r)
  } else if (is_na(sha)) {
    sha <- NULL
  }

  list(
    RemoteType = "bioc_git2r",
    RemoteMirror = x$mirror,
    RemoteRepo = x$repo,
    RemoteRelease = x$release,
    RemoteSha = sha,
    RemoteBranch = x$branch
  )
}

#' @export
remote_metadata.bioc_xgit_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (is_na(sha)) {
    sha <- NULL
  }

  list(
    RemoteType = "bioc_xgit",
    RemoteMirror = x$mirror,
    RemoteRepo = x$repo,
    RemoteRelease = x$release,
    RemoteSha = sha,
    RemoteBranch = x$branch,
    RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " ")
  )
}

#' @export
remote_package_name.bioc_git2r_remote <- function(remote, ...) {
  # Bioconductor repo names are the package names.
  remote$repo
}

#' @export
remote_package_name.bioc_xgit_remote <- function(remote, ...) {
  remote$repo
}

#' @export
remote_sha.bioc_git2r_remote <- function(remote, ...) {
  tryCatch({
    url <- paste0(remote$mirror, "/", remote$repo)

    res <- git2r::remote_ls(url, credentials = remote$credentials)

    found <- grep(pattern = paste0("/", remote$branch), x = names(res))

    if (length(found) == 0) {
      return(NA_character_)
    }

    unname(res[found[1]])
  }, error = function(e) NA_character_)
}

# Fixed: added the missing `@export` tag -- every other remote_sha.* S3 method
# in this package is exported/registered; without it this method was not in
# NAMESPACE and S3 dispatch could not find it from other packages.
#' @export
remote_sha.bioc_xgit_remote <- function(remote, ...) {
  url <- paste0(remote$mirror, "/", remote$repo)
  ref <- remote$branch

  refs <- git(paste("ls-remote", url, ref))

  refs_df <- read.delim(text = refs, stringsAsFactors = FALSE, sep = "\t",
    header = FALSE)
  names(refs_df) <- c("sha", "ref")

  refs_df$sha[[1]] %||% NA_character_
}

# Map a release spec (or pinned commit) to the Bioconductor git branch name:
# a non-NULL sha wins; "devel" -> "master"; numeric releases -> "RELEASE_X_Y";
# "release" is first resolved to the current release number.
bioconductor_branch <- function(release, sha) {
  if (!is.null(sha)) {
    sha
  } else {
    if (is.null(release)) {
      release <- "release"
    }
    if (release == "release") {
      release <- bioconductor_release()
    }
    switch(
      tolower(release),
      devel = "master",
      paste0("RELEASE_", gsub("\\.", "_", release))
    )
  }
}

# Look up the current Bioconductor release number from the project's
# config.yaml.
bioconductor_release <- function() {
  tmp <- tempfile()
  download(tmp, download_url("bioconductor.org/config.yaml"), quiet = TRUE)

  gsub("release_version:[[:space:]]+\"([[:digit:].]+)\"", "\\1",
    grep("release_version:", readLines(tmp), value = TRUE))
}

#' @export
format.bioc_git2r_remote <- function(x, ...) {
  "Bioc"
}

#' @export
format.bioc_xgit_remote <- function(x, ...) {
  "Bioc"
}

# sha of most recent commit
git_repo_sha1 <- function(r) {
  rev <- git2r::repository_head(r)
  if (is.null(rev)) {
    return(NULL)
  }

  if (git2r::is_commit(rev)) {
    rev$sha
  } else {
    git2r::branch_target(rev)
  }
}

# Contents of R/install-bitbucket.R

#' Install a package directly from Bitbucket
#'
#' This function is vectorised so you can install multiple packages in
#' a single command.
#'
#' @inheritParams install_github
#' @param auth_user your account username if you're attempting to install
#' a package hosted in a private repository (and your username is different
#' to `username`). Defaults to the `BITBUCKET_USER` environment
#' variable.
#' @param password your password. Defaults to the `BITBUCKET_PASSWORD`
#' environment variable. See details for further information on setting
#' up a password.
#' @param ref Desired git reference; could be a commit, tag, or branch name.
#' Defaults to master.
#' @seealso Bitbucket API docs:
#' <https://developer.atlassian.com/bitbucket/api/2/reference/>
#'
#' @details To install from a private repo, or more generally, access the
#' Bitbucket API with your own credentials, you will need to get an access
#' token. You can create an access token following the instructions found in
#' the
#' \href{https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html}{Bitbucket
#' App Passwords documentation}. The App Password requires read-only access to
#' your repositories and pull requests. Then store your password in the
#' environment variable `BITBUCKET_PASSWORD` (e.g. `evelynwaugh:swordofhonour`)
#'
#' Note that on Windows, authentication requires the "libcurl" download
#' method. You can set the default download method via the
#' `download.file.method` option:
#' ```
#' options(download.file.method = "libcurl")
#' ```
#' In particular, if unset, RStudio sets the download method to "wininet".
#' To override this, you might want to set it to "libcurl" in your
#' R profile, see [base::Startup]. The caveat of the "libcurl" method is
#' that it does _not_ set the system proxies automatically, see
#' "Setting Proxies" in [utils::download.file()].
#'
#' @inheritParams install_github
#' @family package installation
#' @export
#' @examples
#' \dontrun{
#' install_bitbucket("sulab/mygene.r@@default")
#' install_bitbucket("djnavarro/lsr")
#' }
install_bitbucket <- function(repo, ref = "master", subdir = NULL,
                              auth_user = bitbucket_user(), password = bitbucket_password(),
                              host = "api.bitbucket.org/2.0",
                              dependencies = NA,
                              upgrade = c("default", "ask", "always", "never"),
                              force = FALSE,
                              quiet = FALSE,
                              build = TRUE,
                              build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                              build_manual = FALSE, build_vignettes = FALSE,
                              repos = getOption("repos"),
                              type = getOption("pkgType"),
                              ...) {

  remotes <- lapply(repo, bitbucket_remote, ref = ref,
    subdir = subdir, auth_user = auth_user, password = password,
    host = host)

  install_remotes(remotes, auth_user = auth_user, password = password,
    host = host, dependencies = dependencies, upgrade = upgrade,
    force = force, quiet = quiet, build = build, build_opts = build_opts,
    build_manual = build_manual, build_vignettes = build_vignettes,
    repos = repos, type = type, ...)
}

# Build a bitbucket remote object from a concise repo spec; values parsed out
# of `repo` take precedence over the explicit arguments.
bitbucket_remote <- function(repo, ref = "master", subdir = NULL,
                             auth_user = bitbucket_user(), password = bitbucket_password(),
                             sha = NULL, host = "api.bitbucket.org/2.0", ...) {

  meta <- parse_git_repo(repo)

  remote("bitbucket",
    repo = meta$repo,
    subdir = meta$subdir %||% subdir,
    username = meta$username,
    ref = meta$ref %||% ref,
    sha = sha,
    auth_user = auth_user,
    password = password,
    host = host
  )
}

#' @export
remote_download.bitbucket_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading bitbucket repo ", x$username, "/", x$repo, "@", x$ref)
  }

  dest <- tempfile(fileext = paste0(".tar.gz"))

  url <- bitbucket_download_url(x$username, x$repo, x$ref, host = x$host, auth = basic_auth(x))

  download(dest, url, basic_auth = basic_auth(x))
}

#' @export
remote_metadata.bitbucket_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (!is.null(bundle)) {
    # Might be able to get from archive
    sha <- git_extract_sha1_tar(bundle)
  } else if (is_na(sha)) {
    # Fixed: this was `is.na(sha)`, which yields logical(0) when `sha` is NULL
    # and makes the `if` error. All sibling remote_metadata.* methods use the
    # length-safe project helper `is_na()`.
    sha <- NULL
  }

  list(
    RemoteType = "bitbucket",
    RemoteHost = x$host,
    RemoteRepo = x$repo,
    RemoteUsername = x$username,
    RemoteRef = x$ref,
    RemoteSha = sha,
    RemoteSubdir = x$subdir
  )
}

#' @export
remote_package_name.bitbucket_remote <- function(remote, ...) {
  # Read the package name from the remote DESCRIPTION file.
  bitbucket_DESCRIPTION(
    username = remote$username, repo = remote$repo,
    subdir = remote$subdir, ref = remote$ref,
    host = remote$host, auth = basic_auth(remote)
  )$Package
}

#' @export
remote_sha.bitbucket_remote <- function(remote, ...) {
  bitbucket_commit(username = remote$username, repo = remote$repo,
    host = remote$host, ref = remote$ref,
    auth = basic_auth(remote))$hash %||% NA_character_
}

#' @export
format.bitbucket_remote <- function(x, ...) {
  "Bitbucket"
}

# Fetch commit metadata for `ref` from the Bitbucket 2.0 API.
bitbucket_commit <- function(username, repo, ref = "master",
                             host = "api.bitbucket.org/2.0", auth = NULL) {

  url <- build_url(host, "repositories", username, repo, "commit", ref)

  tmp <- tempfile()
  download(tmp, url, basic_auth = auth)

  json$parse_file(tmp)
}

# Fetch and parse the DESCRIPTION file of a Bitbucket repo at `ref`.
bitbucket_DESCRIPTION <- function(username, repo, subdir = NULL, ref = "master",
                                  host = "https://api.bitbucket.org/2.0", auth = NULL, ...) {

  url <- build_url(host, "repositories", username, repo, "src", ref, subdir, "DESCRIPTION")

  tmp <- tempfile()
  download(tmp, url, basic_auth = auth)

  read_dcf(tmp)
}

# Assemble basic-auth credentials from a remote-like list; NULL when no
# password is configured (anonymous access).
basic_auth <- function(x) {
  if (!is.null(x$password)) {
    list(
      user = x$auth_user %||% x$username,
      password = x$password
    )
  } else {
    NULL
  }
}

# Resolve the repo's HTML link via the API, then derive the tarball URL for
# `ref` from it.
bitbucket_download_url <- function(username, repo, ref = "master",
                                   host = "api.bitbucket.org/2.0", auth = NULL) {

  url <- build_url(host, "repositories", username, repo)

  tmp <- tempfile()
  download(tmp, url, basic_auth = auth)

  paste0(build_url(json$parse_file(tmp)$links$html$href, "get", ref), ".tar.gz")
}

# Read the Bitbucket app password from the env; NULL when unset.
bitbucket_password <- function(quiet = TRUE) {
  pass <- Sys.getenv("BITBUCKET_PASSWORD")
  if (identical(pass, "")) return(NULL)
  if (!quiet) message("Using bitbucket password from envvar BITBUCKET_PASSWORD")
  pass
}

# Read the Bitbucket username from the env; NULL when unset.
bitbucket_user <- function(quiet = TRUE) {
  user <- Sys.getenv("BITBUCKET_USER")
  if (identical(user, "")) return(NULL)
  if (!quiet) message("Using bitbucket user from envvar BITBUCKET_USER")
  user
}

# Contents of R/install-cran.R

#' Attempts to install a package from CRAN.
#'
#' This function is vectorised on `pkgs` so you can install multiple
#' packages in a single command.
#'
#' @param pkgs Character vector of packages to install.
#' @inheritParams install_github
#' @export
#' @family package installation
#' @examples
#' \dontrun{
#' install_cran("ggplot2")
#' install_cran(c("httpuv", "shiny"))
#' }
install_cran <- function(pkgs, repos = getOption("repos"), type = getOption("pkgType"),
                         dependencies = NA,
                         upgrade = c("default", "ask", "always", "never"),
                         force = FALSE,
                         quiet = FALSE,
                         build = TRUE,
                         build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                         build_manual = FALSE, build_vignettes = FALSE,
                         ...) {

  remotes <- lapply(pkgs, cran_remote, repos = repos, type = type)

  install_remotes(remotes, dependencies = dependencies,
    upgrade = upgrade, force = force, quiet = quiet,
    build = build, build_opts = build_opts,
    build_manual = build_manual, build_vignettes = build_vignettes,
    repos = repos, type = type, ...)
}

cran_remote <- function(pkg, repos, type, ...) {
  remote("cran",
    name = pkg,
    repos = repos,
    pkg_type = type)
}

#' @export
remote_package_name.cran_remote <- function(remote, ...) {
  remote$name
}

#' @export
remote_sha.cran_remote <- function(remote, ...) {
  # For CRAN remotes the available version plays the role of the "sha".
  cran <- available_packages(remote$repos, remote$pkg_type)

  trim_ws(unname(cran[, "Version"][match(remote$name, rownames(cran))]))
}

#' @export
format.cran_remote <- function(x, ...) {
  "CRAN"
}

# Contents of R/install-dev.R

#' Install the development version of a package
#'
#' `install_dev()` retrieves the package DESCRIPTION from the CRAN mirror and
#' looks in the 'URL' and 'BugReports' fields for GitHub, GitLab or Bitbucket
#' URLs. It then calls the appropriate `install_()` function to install the
#' development package.
#'
#' @param package The package name to install.
#' @param cran_url The URL of the CRAN mirror to use, by default based on the
#' 'repos' option. If unset uses 'https://cloud.r-project.org'.
#' @param ... Additional arguments passed to [install_github()],
#' [install_gitlab()], or [install_bitbucket()] functions.
#' @family package installation
#' @export
#' @examples
#' \dontrun{
#' # From GitHub
#' install_dev("dplyr")
#'
#' # From GitLab
#' install_dev("iemiscdata")
#'
#' # From Bitbucket
#' install_dev("argparser")
#' }
install_dev <- function(package, cran_url = getOption("repos")[["CRAN"]], ...) {
  if (is.null(cran_url) || identical(cran_url, "@CRAN@")) {
    cran_url <- "https://cloud.r-project.org"
  }

  refs <- dev_split_ref(package)
  url <- build_url(cran_url, "web", "packages", refs[["pkg"]], "DESCRIPTION")

  f <- tempfile()
  on.exit(unlink(f))

  download(f, url)
  desc <- read_dcf(f)

  url_fields <- c(desc$URL, desc$BugReports)

  if (length(url_fields) == 0) {
    stop("Could not determine development repository", call. = FALSE)
  }

  pkg_urls <- unlist(strsplit(url_fields, "[[:space:]]*,[[:space:]]*"))

  # Remove trailing "/issues" from the BugReports URL
  pkg_urls <- sub("/issues$", "", pkg_urls)

  valid_domains <- c("github[.]com", "gitlab[.]com", "bitbucket[.]org")

  # Fixed: the named capture groups had been stripped from this pattern (it
  # read `(?%s)` four times), so the `$domain`/`$username`/`$repo`/`$subdir`
  # columns selected below could never be populated.
  parts <-
    re_match(pkg_urls,
      sprintf("^https?://(?<domain>%s)/(?<username>%s)/(?<repo>%s)(?:/(?<subdir>%s))?",
        domain = paste0(valid_domains, collapse = "|"),
        username = "[^/]+",
        repo = "[^/@#]+",
        subdir = "[^/@$ ]+"
      )
    )[c("domain", "username", "repo", "subdir")]

  # Remove cases which don't match and duplicates
  parts <- unique(stats::na.omit(parts))

  if (nrow(parts) != 1) {
    stop("Could not determine development repository", call. = FALSE)
  }

  full_ref <- paste0(
    paste0(c(parts$username, parts$repo, if (nzchar(parts$subdir)) parts$subdir), collapse = "/"),
    refs[["ref"]]
  )

  switch(parts$domain,
    github.com = install_github(full_ref, ...),
    gitlab.com = install_gitlab(full_ref, ...),
    bitbucket.org = install_bitbucket(full_ref, ...)
  )
}

# Contents of R/install-git.R

#' Install a package from a git repository
#'
#' It is vectorised so you can install multiple packages with
#' a single command. You do not need to have the `git2r` package,
#' or an external git client installed.
#'
#' If you need to set git credentials for use in the `Remotes` field you can do
#' so by placing the credentials in the `remotes.git_credentials` global
#' option.
#'
#' @param url Location of package. The url should point to a public or
#' private repository.
#' @param ref Name of branch, tag or SHA reference to use, if not HEAD.
#' @param branch Deprecated, synonym for ref.
#' @param subdir A sub-directory within a git repository that may
#' contain the package we are interested in installing.
#' @param credentials A git2r credentials object passed through to clone.
#' Supplying this argument implies using `git2r` with `git`.
#' @param git Whether to use the `git2r` package, or an external
#' git client via system. Default is `git2r` if it is installed,
#' otherwise an external git installation.
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams install_github
#' @family package installation
#' @export
#' @examples
#' \dontrun{
#' install_git("git://github.com/hadley/stringr.git")
#' install_git("git://github.com/hadley/stringr.git", ref = "stringr-0.2")
#'}
install_git <- function(url, subdir = NULL, ref = NULL, branch = NULL,
                        credentials = git_credentials(),
                        git = c("auto", "git2r", "external"),
                        dependencies = NA,
                        upgrade = c("default", "ask", "always", "never"),
                        force = FALSE,
                        quiet = FALSE,
                        build = TRUE,
                        build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                        build_manual = FALSE, build_vignettes = FALSE,
                        repos = getOption("repos"),
                        type = getOption("pkgType"),
                        ...) {

  # `branch` is kept only for backward compatibility; it overrides `ref`.
  if (!missing(branch)) {
    warning("`branch` is deprecated, please use `ref`")
    ref <- branch
  }

  remotes <- lapply(url, git_remote, subdir = subdir, ref = ref,
    credentials = credentials, git = match.arg(git))

  install_remotes(remotes, credentials = credentials,
    dependencies = dependencies, upgrade = upgrade, force = force,
    quiet = quiet, build = build, build_opts = build_opts,
    build_manual = build_manual, build_vignettes = build_vignettes,
    repos = repos, type = type, ...)
}

# Build a git remote, dispatching to the git2r or external-git backend.
# `credentials` is a git2r concept, so it is rejected for the external client.
git_remote <- function(url, subdir = NULL, ref = NULL,
                       credentials = git_credentials(),
                       git = c("auto", "git2r", "external"), ...) {

  git <- match.arg(git)
  if (git == "auto") {
    git <- if (!is_standalone() && pkg_installed("git2r")) "git2r" else "external"
  }

  if (!is.null(credentials) && git != "git2r") {
    stop("`credentials` can only be used with `git = \"git2r\"`", call. = FALSE)
  }

  list(git2r = git_remote_git2r, external = git_remote_xgit)[[git]](url, subdir, ref, credentials)
}

# Remote constructor for the git2r backend.
git_remote_git2r <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials()) {
  remote("git2r",
    url = url,
    subdir = subdir,
    ref = ref,
    credentials = credentials
  )
}

# Remote constructor for the external-git backend (credentials unsupported,
# so they are not stored).
git_remote_xgit <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials()) {
  remote("xgit",
    url = url,
    subdir = subdir,
    ref = ref
  )
}

#' @export
remote_download.git2r_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading git repo ", x$url)
  }

  bundle <- tempfile()
  git2r::clone(x$url, bundle, credentials = x$credentials, progress = FALSE)

  if (!is.null(x$ref)) {
    r <- git2r::repository(bundle)
    git2r::checkout(r, x$ref)
  }

  bundle
}

#' @export
remote_metadata.git2r_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (!is.null(bundle)) {
    r <- git2r::repository(bundle)
    sha <- git2r::commits(r)[[1]]$sha
  } else {
    # NOTE(review): unlike remote_metadata.xgit_remote (which keeps a passed
    # non-NA `sha`), this branch always discards `sha`. Looks inconsistent --
    # confirm whether that is intentional before changing.
    sha <- NULL
  }

  list(
    RemoteType = "git2r",
    RemoteUrl = x$url,
    RemoteSubdir = x$subdir,
    RemoteRef = x$ref,
    RemoteSha = sha
  )
}

#' @export
remote_package_name.git2r_remote <- function(remote, ...) {
  tmp <- tempfile()
  on.exit(unlink(tmp))
  description_path <- paste0(collapse = "/", c(remote$subdir, "DESCRIPTION"))

  # Try using git archive --remote to retrieve the DESCRIPTION, if the protocol
  # or server doesn't support that return NA
  res <- try(silent = TRUE,
    system_check(git_path(),
      args = c("archive", "-o", tmp, "--remote", remote$url,
        if (is.null(remote$ref)) "HEAD" else remote$ref,
        description_path),
      quiet = TRUE))

  if (inherits(res, "try-error")) {
    return(NA_character_)
  }

  # git archive returns a tar file, so extract it to tempdir and read the DCF
  utils::untar(tmp, files = description_path, exdir = tempdir())

  read_dcf(file.path(tempdir(), description_path))$Package
}

#' @export
remote_sha.git2r_remote <- function(remote, ...) {
  tryCatch({
    # set suppressWarnings in git2r 0.23.0+
    res <- suppressWarnings(git2r::remote_ls(remote$url, credentials=remote$credentials))

    # This needs to be master, not HEAD because no ref is called HEAD
    ref <- remote$ref %||% "master"

    found <- grep(pattern = paste0("/", ref), x = names(res))

    # If none found, it is either a SHA, so return the pinned sha or NA
    if (length(found) == 0) {
      return(remote$ref %||% NA_character_)
    }

    unname(res[found[1]])
  }, error = function(e) { warning(e); NA_character_})
}

#' @export
format.xgit_remote <- function(x, ...) {
  "Git"
}

#' @export
format.git2r_remote <- function(x, ...) {
  "Git"
}

#' @export
remote_download.xgit_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading git repo ", x$url)
  }

  bundle <- tempfile()

  # Shallow clone, then fetch + checkout the requested ref separately (a
  # shallow clone alone may not contain an arbitrary ref).
  args <- c('clone', '--depth', '1', '--no-hardlinks')
  args <- c(args, x$args, x$url, bundle)
  git(paste0(args, collapse = " "), quiet = quiet)

  if (!is.null(x$ref)) {
    git(paste0(c("fetch", "origin", x$ref), collapse = " "), quiet = quiet, path = bundle)
    git(paste0(c("checkout", "FETCH_HEAD"), collapse = " "), quiet = quiet, path = bundle)
  }

  bundle
}

#' @export
remote_metadata.xgit_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (is_na(sha)) {
    sha <- NULL
  }

  list(
    RemoteType = "xgit",
    RemoteUrl = x$url,
    RemoteSubdir = x$subdir,
    RemoteRef = x$ref,
    RemoteSha = sha,
    RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " ")
  )
}

#' @importFrom utils read.delim
#' @export
remote_package_name.xgit_remote <- remote_package_name.git2r_remote

#' @export
remote_sha.xgit_remote <- function(remote, ...) {
  url <- remote$url
  ref <- remote$ref

  refs <- git(paste("ls-remote", url, ref))

  # If none found, it is either a SHA, so return the pinned SHA or NA
  if (length(refs) == 0) {
    return(remote$ref %||% NA_character_)
  }

  refs_df <- read.delim(text = refs, stringsAsFactors = FALSE, sep = "\t",
    header = FALSE)
  names(refs_df) <- c("sha", "ref")

  refs_df$sha[[1]]
}

#' Specify git credentials to use
#'
#' The global option `remotes.git_credentials` is used to set the git
#' credentials.
#' @export
#' @keywords internal
git_credentials <- function() {
  getOption("remotes.git_credentials", NULL)
}

# Contents of R/install-github.R

#' Attempts to install a package directly from GitHub.
#'
#' This function is vectorised on `repo` so you can install multiple
#' packages in a single command.
#'
#' @param repo Repository address in the format
#' `username/repo[/subdir][@@ref|#pull]`. Alternatively, you can
#' specify `subdir` and/or `ref` using the respective parameters
#' (see below); if both is specified, the values in `repo` take
#' precedence.
#' @param ref Desired git reference. Could be a commit, tag, or branch
#' name, or a call to [github_pull()]. Defaults to `"master"`.
#' @param subdir subdirectory within repo that contains the R package.
#' @param auth_token To install from a private repo, generate a personal
#' access token (PAT) in "https://github.com/settings/tokens" and
#' supply to this argument. This is safer than using a password because
#' you can easily delete a PAT without affecting any others. Defaults to
#' the `GITHUB_PAT` environment variable.
#' @param host GitHub API host to use. Override with your GitHub enterprise
#' hostname, for example, `"github.hostname.com/api/v3"`.
#' @param force Force installation, even if the remote state has not changed
#' since the previous install.
#' @inheritParams install_deps
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @details
#' If the repository uses submodules a command-line git client is required to
#' clone the submodules.
#' @family package installation
#' @export
#' @seealso [github_pull()]
#' @examples
#' \dontrun{
#' install_github("klutometis/roxygen")
#' install_github("wch/ggplot2")
#' install_github(c("rstudio/httpuv", "rstudio/shiny"))
#' install_github(c("hadley/httr@@v0.4", "klutometis/roxygen#142",
#' "mfrasca/r-logging/pkg"))
#'
#' # To install from a private repo, use auth_token with a token
#' # from https://github.com/settings/tokens. You only need the
#' # repo scope. Best practice is to save your PAT in env var called
#' # GITHUB_PAT.
#' install_github("hadley/private", auth_token = "abc")
#'
#' }
install_github <- function(repo, ref = "master", subdir = NULL,
                           auth_token = github_pat(quiet),
                           host = "api.github.com",
                           dependencies = NA,
                           upgrade = c("default", "ask", "always", "never"),
                           force = FALSE,
                           quiet = FALSE,
                           build = TRUE,
                           build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                           build_manual = FALSE, build_vignettes = FALSE,
                           repos = getOption("repos"),
                           type = getOption("pkgType"),
                           ...) {
  # Note: the `auth_token` default refers to the `quiet` parameter of this
  # very function; R's lazy evaluation resolves it at first use.

  remotes <- lapply(repo, github_remote, ref = ref,
    subdir = subdir, auth_token = auth_token, host = host)

  install_remotes(remotes, auth_token = auth_token, host = host,
    dependencies = dependencies, upgrade = upgrade, force = force,
    quiet = quiet, build = build, build_opts = build_opts,
    build_manual = build_manual, build_vignettes = build_vignettes,
    repos = repos, type = type, ...)
}

# Build a github remote object: parse the concise spec, resolve special refs
# (pull requests / latest release) via the API, and record the pieces.
github_remote <- function(repo, ref = "master", subdir = NULL,
                          auth_token = github_pat(), sha = NULL,
                          host = "api.github.com", ...) {

  meta <- parse_git_repo(repo)
  meta <- github_resolve_ref(meta$ref %||% ref, meta, host = host, auth_token = auth_token)

  remote("github",
    host = host,
    package = meta$package,
    repo = meta$repo,
    subdir = meta$subdir %||% subdir,
    username = meta$username,
    ref = meta$ref,
    sha = sha,
    auth_token = auth_token
  )
}

#' @export
remote_download.github_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading GitHub repo ", x$username, "/", x$repo, "@", x$ref)
  }

  dest <- tempfile(fileext = paste0(".tar.gz"))
  src_root <- build_url(x$host, "repos", x$username, x$repo)
  src <- paste0(src_root, "/tarball/", utils::URLencode(x$ref, reserved = TRUE))

  download(dest, src, auth_token = x$auth_token)
}

#' @export
remote_metadata.github_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (!is.null(bundle)) {
    # Might be able to get from archive
    sha <- git_extract_sha1_tar(bundle)
  } else if (is_na(sha)) {
    sha <- NULL
  }

  list(
    RemoteType = "github",
    RemoteHost = x$host,
    RemotePackage = x$package,
    RemoteRepo = x$repo,
    RemoteUsername = x$username,
    RemoteRef = x$ref,
    RemoteSha = sha,
    RemoteSubdir = x$subdir,
    # Backward compatibility for packrat etc.
    GithubRepo = x$repo,
    GithubUsername = x$username,
    GithubRef = x$ref,
    GithubSHA1 = sha,
    GithubSubdir = x$subdir
  )
}

#' GitHub references
#'
#' Use as `ref` parameter to [install_github()].
#' Allows installing a specific pull request or the latest release.
#'
#' @param pull The pull request to install
#' @seealso [install_github()]
#' @rdname github_refs
#' @export
github_pull <- function(pull) structure(pull, class = "github_pull")

#' @rdname github_refs
#' @export
github_release <- function() structure(NA_integer_, class = "github_release")

# S3 generic: rewrite `params` (parsed repo spec) so that `params$ref` is a
# concrete git ref; dispatches on the class of `x` (plain string, NULL,
# github_pull, or github_release).
github_resolve_ref <- function(x, params, ...) UseMethod("github_resolve_ref")

#' @export
github_resolve_ref.default <- function(x, params, ...) {
  params$ref <- x
  params
}

#' @export
github_resolve_ref.NULL <- function(x, params, ...) {
  # No ref given: fall back to the default branch name "master".
  params$ref <- "master"
  params
}

#' @export
github_resolve_ref.github_pull <- function(x, params, ..., host, auth_token = github_pat()) {
  # GET /repos/:user/:repo/pulls/:number
  path <- file.path("repos", params$username, params$repo, "pulls", x)
  response <- tryCatch(
    github_GET(path, host = host, pat = auth_token),
    error = function(e) e
  )

  ## Just because libcurl might download the error page...
  if (methods::is(response, "error") || is.null(response$head)) {
    stop("Cannot find GitHub pull request ", params$username, "/",
      params$repo, "#", x, "\n", response$message)
  }

  # Point at the PR's source branch (possibly in a fork).
  params$username <- response$head$user$login
  params$ref <- response$head$ref
  params
}

# Retrieve the ref for the latest release
#' @export
github_resolve_ref.github_release <- function(x, params, ..., host, auth_token = github_pat()) {
  # GET /repos/:user/:repo/releases
  path <- paste("repos", params$username, params$repo, "releases", sep = "/")
  response <- tryCatch(
    github_GET(path, host = host, pat = auth_token),
    error = function(e) e
  )

  if (methods::is(response, "error") || !is.null(response$message)) {
    stop("Cannot find repo ", params$username, "/", params$repo, ".", "\n",
      response$message)
  }

  if (length(response) == 0L)
    stop("No releases found for repo ", params$username, "/", params$repo, ".")

  # The API returns releases newest-first; take the first tag.
  params$ref <- response[[1L]]$tag_name
  params
}

#' @export
remote_package_name.github_remote <- function(remote, ..., use_local = TRUE,
  use_curl = !is_standalone() && pkg_installed("curl")) {

  # If the package name was explicitly specified, use that
  if (!is.null(remote$package)) {
    return(remote$package)
  }

  # Otherwise if the repo is an already installed package assume that.
  if (isTRUE(use_local)) {
    local_name <- suppressWarnings(utils::packageDescription(remote$repo, fields = "Package"))
    if (!is.na(local_name)) {
      return(local_name)
    }
  }

  # Otherwise lookup the package name from the remote DESCRIPTION file
  desc <- github_DESCRIPTION(username = remote$username, repo = remote$repo,
    subdir = remote$subdir, host = remote$host, ref = remote$ref,
    pat = remote$auth_token %||% github_pat(), use_curl = use_curl)

  if (is.null(desc)) {
    return(NA_character_)
  }

  tmp <- tempfile()
  writeChar(desc, tmp)
  on.exit(unlink(tmp))

  read_dcf(tmp)$Package
}

#' @export
remote_sha.github_remote <- function(remote, ..., use_curl = !is_standalone() && pkg_installed("curl")) {
  tryCatch(
    github_commit(username = remote$username, repo = remote$repo,
      host = remote$host, ref = remote$ref,
      pat = remote$auth_token %||% github_pat(), use_curl = use_curl),

    # 422 errors most often occur when a branch or PR has been deleted, so we
    # ignore the error in this case
    http_422 = function(e) NA_character_
  )
}

#' @export
format.github_remote <- function(x, ...) {
  "GitHub"
}

# Contents of R/install-gitlab.R

#' Install a package from GitLab
#'
#' This function is vectorised on `repo` so you can install multiple
#' packages in a single command. Like other remotes the repository will skip
#' installation if `force == FALSE` (the default) and the remote state has
#' not changed since the previous installation.
#'
#' @inheritParams install_github
#' @param repo Repository address in the format
#' `username/repo[@@ref]`.
#' @param host GitLab API host to use. Override with your GitLab enterprise
#' hostname, for example, `"gitlab.hostname.com"`.
#' @param auth_token To install from a private repo, generate a personal access
#' token (PAT) in \url{https://gitlab.com/profile/personal_access_tokens} and
#' supply to this argument. This is safer than using a password because you
#' can easily delete a PAT without affecting any others. Defaults to the
#' GITLAB_PAT environment variable.
#' @inheritParams install_github
#' @export
#' @family package installation
#' @examples
#' \dontrun{
#' install_gitlab("jimhester/covr")
#' }
install_gitlab <- function(repo,
                           subdir = NULL,
                           # NOTE: `gitlab_pat(quiet)` refers to this
                           # function's own `quiet` argument via R's lazy
                           # default evaluation.
                           auth_token = gitlab_pat(quiet),
                           host = "gitlab.com",
                           dependencies = NA,
                           upgrade = c("default", "ask", "always", "never"),
                           force = FALSE,
                           quiet = FALSE,
                           build = TRUE,
                           build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                           build_manual = FALSE, build_vignettes = FALSE,
                           repos = getOption("repos"),
                           type = getOption("pkgType"),
                           ...) {

  # One remote per element of `repo` (vectorised install).
  remotes <- lapply(repo, gitlab_remote, subdir = subdir,
    auth_token = auth_token, host = host)

  install_remotes(remotes, auth_token = auth_token, host = host,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Build a gitlab_remote object from a "user/repo[@ref]" spec.
gitlab_remote <- function(repo, subdir = NULL, auth_token = gitlab_pat(),
  sha = NULL, host = "gitlab.com", ...) {

  meta <- parse_git_repo(repo)
  meta$ref <- meta$ref %||% "master"

  remote("gitlab",
    host = host,
    # A subdir parsed out of the spec is treated as part of the repo path
    # (GitLab subgroups); the explicit `subdir` argument is kept separate.
    repo = paste(c(meta$repo, meta$subdir), collapse = "/"),
    subdir = subdir,
    username = meta$username,
    ref = meta$ref,
    sha = sha,
    auth_token = auth_token
  )
}

# Download the repo archive tarball via the GitLab v4 API; returns the path
# of the downloaded file.
#' @export
remote_download.gitlab_remote <- function(x, quiet = FALSE) {
  dest <- tempfile(fileext = paste0(".tar.gz"))

  project_id <- gitlab_project_id(x$username, x$repo, x$ref, x$host, x$auth_token)

  src_root <- build_url(x$host, "api", "v4", "projects", project_id)
  src <- paste0(src_root, "/repository/archive.tar.gz?sha=",
    utils::URLencode(x$ref, reserved = TRUE))

  if (!quiet) {
    message("Downloading GitLab repo ", x$username, "/", x$repo, "@", x$ref,
      "\nfrom URL ", src)
  }

  download(dest, src, headers = c("Private-Token" = x$auth_token))
}

# DESCRIPTION metadata fields recorded in the installed package.
#' @export
remote_metadata.gitlab_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (!is.null(bundle)) {
    # Might be able to get from archive
    sha <- git_extract_sha1_tar(bundle)
  } else if (is_na(sha)) {
    sha <- NULL
  }

  list(
    RemoteType = "gitlab",
    RemoteHost = x$host,
    RemoteRepo = x$repo,
    RemoteUsername = x$username,
    RemoteRef = x$ref,
    RemoteSha = sha,
    RemoteSubdir = x$subdir
  )
}

# Fetch the package name from the remote DESCRIPTION; falls back to the repo
# name if the file cannot be read.
#' @export
remote_package_name.gitlab_remote <- function(remote, ...) {
  tmp <- tempfile()

  src_root <- build_url(
    remote$host, "api", "v4", "projects",
    utils::URLencode(paste0(remote$username, "/", remote$repo),
      reserved = TRUE), "repository")

  src <- paste0(
    src_root, "/files/",
    ifelse(
      is.null(remote$subdir),
      "DESCRIPTION",
      utils::URLencode(paste0(remote$subdir, "/DESCRIPTION"), reserved = TRUE)),
    "/raw?ref=", remote$ref)

  dest <- tempfile()
  res <- download(dest, src, headers = c("Private-Token" = remote$auth_token))

  tryCatch(
    read_dcf(dest)$Package,
    error = function(e) remote$repo)
}

#' @export
remote_sha.gitlab_remote <- function(remote, ...) {
  gitlab_commit(username = remote$username, repo = remote$repo,
    host = remote$host, ref = remote$ref, pat = remote$auth_token)
}

#' @export
format.gitlab_remote <- function(x, ...) {
  "GitLab"
}

# Look up the commit SHA for `ref` via the GitLab commits API.
gitlab_commit <- function(username, repo, ref = "master",
  host = "gitlab.com", pat = gitlab_pat()) {

  url <- build_url(host, "api", "v4", "projects",
    utils::URLencode(paste0(username, "/", repo), reserved = TRUE),
    "repository", "commits", ref)

  tmp <- tempfile()
  download(tmp, url, headers = c("Private-Token" = pat))

  json$parse_file(tmp)$id
}

#' Retrieve GitLab personal access token.
#'
#' A GitLab personal access token
#' Looks in env var `GITLAB_PAT`
#'
#' @keywords internal
#' @export
gitlab_pat <- function(quiet = TRUE) {
  pat <- Sys.getenv("GITLAB_PAT")
  if (nzchar(pat)) {
    if (!quiet) {
      message("Using GitLab PAT from envvar GITLAB_PAT")
    }
    return(pat)
  }
  return(NULL)
}

# Resolve the numeric project id GitLab uses in API URLs, read from the
# commits endpoint response.
gitlab_project_id <- function(username, repo, ref = "master",
  host = "gitlab.com", pat = gitlab_pat()) {

  url <- build_url(host, "api", "v4", "projects",
    utils::URLencode(paste0(username, "/", repo), reserved = TRUE),
    "repository", "commits", ref)

  tmp <- tempfile()
  download(tmp, url, headers = c("Private-Token" = pat))

  json$parse_file(tmp)$project_id
}

# Contents of R/install-local.R

#' Install a package from a local file
#'
#' This function is vectorised so you can install multiple packages in
#' a single command.
#'
#' @param path path to local directory, or compressed file (tar, zip, tar.gz,
#'   tar.bz2, tgz or tbz)
#' @inheritParams install_url
#' @inheritParams install_github
#' @export
#' @family package installation
#' @examples
#' \dontrun{
#' dir <- tempfile()
#' dir.create(dir)
#' pkg <- download.packages("testthat", dir, type = "source")
#' install_local(pkg[, 2])
#' }
install_local <- function(path = ".", subdir = NULL,
                          dependencies = NA,
                          upgrade = c("default", "ask", "always", "never"),
                          force = FALSE,
                          quiet = FALSE,
                          # Binary packages cannot be re-built, hence the
                          # content-sensitive default.
                          build = !is_binary_pkg(path),
                          build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                          build_manual = FALSE, build_vignettes = FALSE,
                          repos = getOption("repos"),
                          type = getOption("pkgType"),
                          ...) {

  remotes <- lapply(path, local_remote, subdir = subdir)
  install_remotes(remotes,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Build a local_remote object. NOTE(review): `branch` and `args` are accepted
# but unused here — presumably kept for signature compatibility with the other
# *_remote constructors; confirm before removing.
local_remote <- function(path, subdir = NULL, branch = NULL, args = character(0), ...) {
  remote("local",
    path = normalizePath(path),
    subdir = subdir
  )
}

#' @export
remote_download.local_remote <- function(x, quiet = FALSE) {
  # Already downloaded - just need to copy to tempdir()
  bundle <- tempfile()
  dir.create(bundle)
  suppressWarnings(
    res <- file.copy(x$path, bundle, recursive = TRUE)
  )
  if (!all(res)) {
    stop("Could not copy `", x$path, "` to `", bundle, "`", call. = FALSE)
  }

  # file.copy() creates directory inside of bundle
  dir(bundle, full.names = TRUE)[1]
}

#' @export
remote_metadata.local_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  list(
    RemoteType = "local",
    RemoteUrl = x$path,
    RemoteSubdir = x$subdir
  )
}

#' @export
remote_package_name.local_remote <- function(remote, ...) {
  is_tarball <- !dir.exists(remote$path)
  if (is_tarball) {
    # Assume the name is the name of the tarball
    return(sub("_.*$", "", basename(remote$path)))
  }
  description_path <- file.path(remote$path, "DESCRIPTION")

  read_dcf(description_path)$Package
}

# For a local directory the "sha" is the package Version — any version bump
# triggers re-installation.
#' @export
remote_sha.local_remote <- function(remote, ...) {
  is_tarball <- !dir.exists(remote$path)
  if (is_tarball) {
    return(NA_character_)
  }

  read_dcf(file.path(remote$path, "DESCRIPTION"))$Version
}

#' @export
format.local_remote <- function(x, ...) {
  "local"
}

# Contents of R/install-remote.R

#' Install a remote package.
#'
#' This:
#' \enumerate{
#'   \item downloads source bundle
#'   \item decompresses & checks that it's a package
#'   \item adds metadata to DESCRIPTION
#'   \item calls install
#' }
#' @noRd
install_remote <- function(remote,
                           dependencies,
                           upgrade,
                           force,
                           quiet,
                           build,
                           build_opts,
                           build_manual,
                           build_vignettes,
                           repos,
                           type,
                           ...) {

  stopifnot(is.remote(remote))

  package_name <- remote_package_name(remote)
  local_sha <- local_sha(package_name)
  remote_sha <- remote_sha(remote, local_sha)

  # Skip the install entirely when the remote SHA matches the locally
  # installed one (unless forced).
  if (!isTRUE(force) &&
    !different_sha(remote_sha = remote_sha, local_sha = local_sha)) {

    if (!quiet) {
      message(
        "Skipping install of '", package_name, "' from a ",
        sub("_remote", "", class(remote)[1L]), " remote,",
        " the SHA1 (", substr(remote_sha, 1L, 8L),
        ") has not changed since last install.\n",
        " Use `force = TRUE` to force installation")
    }
    return(invisible(package_name))
  }

  # CRAN remotes install straight from the repository, no bundle download.
  if (inherits(remote, "cran_remote")) {
    install_packages(
      package_name, repos = remote$repos, type = remote$pkg_type,
      dependencies = dependencies,
      quiet = quiet,
      ...)
    return(invisible(package_name))
  }

  res <- try(bundle <- remote_download(remote, quiet = quiet), silent = quiet)
  if (inherits(res, "try-error")) {
    return(NA_character_)
  }

  on.exit(unlink(bundle), add = TRUE)

  source <- source_pkg(bundle, subdir = remote$subdir)
  on.exit(unlink(source, recursive = TRUE), add = TRUE)

  update_submodules(source, remote$subdir, quiet)

  add_metadata(source, remote_metadata(remote, bundle, source, remote_sha))

  # Because we've modified DESCRIPTION, its original MD5 value is wrong
  clear_description_md5(source)

  install(source,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Install each remote in turn; a failure for one remote aborts the whole run
# with a classed install_error.
install_remotes <- function(remotes, ...) {
  res <- character(length(remotes))
  for (i in seq_along(remotes)) {
    tryCatch(
      res[[i]] <- install_remote(remotes[[i]], ...),
      error = function(e) {
        stop(remote_install_error(remotes[[i]], e))
      })
  }
  invisible(res)
}

# Wrap a low-level error in a condition carrying the remote and the original
# error for callers that want to inspect them.
remote_install_error <- function(remote, error) {
  msg <- sprintf(
    "Failed to install '%s' from %s:\n  %s", remote_name_or_unknown(remote),
    format(remote), conditionMessage(error)
  )

 structure(list(message = msg, call = NULL, error = error, remote = remote),
   class = c("install_error", "error", "condition"))
}

# Best-effort package name for error messages; never throws.
remote_name_or_unknown <- function(remote) {
  res <- tryCatch(
    res <- remote_package_name(remote),
    error = function(e) NA_character_)

  if (is.na(res)) {
    return("unknown package")
  }

  res
}

# Add metadata
add_metadata <- function(pkg_path, meta) {

  # During installation, the DESCRIPTION file is read and an package.rds file
  # created with most of the information from the DESCRIPTION file. Functions
  # that read package metadata may use either the DESCRIPTION file or the
  # package.rds file, therefore we attempt to modify both of them
  source_desc <- file.path(pkg_path, "DESCRIPTION")
  binary_desc <- file.path(pkg_path, "Meta", "package.rds")
  if (file.exists(source_desc)) {
    desc <- read_dcf(source_desc)

    desc <- utils::modifyList(desc, meta)

    write_dcf(source_desc, desc)
  }

  if (file.exists(binary_desc)) {
    pkg_desc <- base::readRDS(binary_desc)
    desc <- as.list(pkg_desc$DESCRIPTION)
    desc <- utils::modifyList(desc, meta)
    pkg_desc$DESCRIPTION <- stats::setNames(as.character(desc), names(desc))
    base::saveRDS(pkg_desc, binary_desc)
  }
}

# Modify the MD5 file - remove the line for DESCRIPTION
clear_description_md5 <- function(pkg_path) {
  path <- file.path(pkg_path, "MD5")

  if (file.exists(path)) {
    text <- readLines(path)
    text <- text[!grepl(".*\\*DESCRIPTION$", text)]

    writeLines(text, path)
  }
}

# Constructor for all remote objects: captures ... as a list tagged with
# class c("<type>_remote", "remote") for S3 dispatch.
remote <- function(type, ...) {
  structure(list(...), class = c(paste0(type, "_remote"), "remote"))
}

is.remote <- function(x) inherits(x, "remote")

# S3 generics implemented by each remote type.
remote_download <- function(x, quiet = FALSE) UseMethod("remote_download")

remote_metadata <- function(x, bundle = NULL, source = NULL, sha = NULL) UseMethod("remote_metadata")

remote_package_name <- function(remote, ...) UseMethod("remote_package_name")

remote_sha <- function(remote, ...) UseMethod("remote_sha")

remote_package_name.default <- function(remote, ...) remote$repo

remote_sha.default <- function(remote, ...) NA_character_

# TRUE when the remote and local SHAs differ (an NA on either side counts as
# different, forcing a re-install).
different_sha <- function(remote_sha, local_sha) {
  same <- remote_sha == local_sha
  same <- isTRUE(same) && !is.na(same)
  !same
}

# SHA recorded for the locally installed copy of `name`, or NA if unknown.
local_sha <- function(name) {
  package2remote(name)$sha %||% NA_character_
}

# Convert an installed package to its equivalent remote. This constructs the
# remote from metadata stored in the package's DESCRIPTION file; the metadata
# is added to the package when it is installed by remotes.
# If the package is installed some other way, such as by `install.packages()`
# there will be no meta-data, so there we construct a generic CRAN remote.
package2remote <- function(name, lib = .libPaths(), repos = getOption("repos"), type = getOption("pkgType")) {

  x <- tryCatch(utils::packageDescription(name, lib.loc = lib),
    error = function(e) NA, warning = function(e) NA)

  # will be NA if not installed
  if (identical(x, NA)) {
    return(remote("cran",
      name = name,
      repos = repos,
      pkg_type = type,
      sha = NA_character_))
  }

  if (is.null(x$RemoteType) || x$RemoteType == "cran") {

    # Packages installed with install.packages() or locally without remotes
    return(remote("cran",
      name = x$Package,
      repos = repos,
      pkg_type = type,
      # use the Version as the "sha" for plain CRAN installs
      sha = x$Version))
  }

  # Reconstruct the original remote from the Remote* DESCRIPTION fields
  # written at install time.
  switch(x$RemoteType,
    standard = remote("cran",
      name = x$Package,
      repos = x$RemoteRepos %||% repos,
      pkg_type = x$RemotePkgType %||% type,
      sha = x$RemoteSha),
    github = remote("github",
      host = x$RemoteHost,
      package = x$RemotePackage,
      repo = x$RemoteRepo,
      subdir = x$RemoteSubdir,
      username = x$RemoteUsername,
      ref = x$RemoteRef,
      sha = x$RemoteSha,
      auth_token = github_pat()),
    gitlab = remote("gitlab",
      host = x$RemoteHost,
      repo = x$RemoteRepo,
      subdir = x$RemoteSubdir,
      username = x$RemoteUsername,
      ref = x$RemoteRef,
      sha = x$RemoteSha,
      auth_token = gitlab_pat()),
    xgit = remote("xgit",
      url = trim_ws(x$RemoteUrl),
      ref = x$RemoteRef %||% x$RemoteBranch,
      sha = x$RemoteSha,
      subdir = x$RemoteSubdir,
      args = x$RemoteArgs),
    git2r = remote("git2r",
      url = trim_ws(x$RemoteUrl),
      ref = x$RemoteRef %||% x$RemoteBranch,
      sha = x$RemoteSha,
      subdir = x$RemoteSubdir,
      credentials = git_credentials()),
    bitbucket = remote("bitbucket",
      host = x$RemoteHost,
      repo = x$RemoteRepo,
      username = x$RemoteUsername,
      ref = x$RemoteRef,
      sha = x$RemoteSha,
      subdir = x$RemoteSubdir,
      auth_user = bitbucket_user(),
      password = bitbucket_password()),
    svn = remote("svn",
      url = trim_ws(x$RemoteUrl),
      svn_subdir = x$RemoteSubdir,
      revision = x$RemoteSha,
      args = x$RemoteArgs),
    local = remote("local",
      path = trim_ws(x$RemoteUrl),
      subdir = x$RemoteSubdir,
      sha = {
        # Packages installed locally might have RemoteSha == NA_character_
        x$RemoteSha %||% x$Version
      }),
    url = remote("url",
      url = trim_ws(x$RemoteUrl),
      subdir = x$RemoteSubdir,
      config = x$RemoteConfig,
      pkg_type = x$RemotePkgType %||% type),
    bioc_git2r = remote("bioc_git2r",
      mirror = x$RemoteMirror,
      repo = x$RemoteRepo,
      release = x$RemoteRelease,
      sha = x$RemoteSha,
      branch = x$RemoteBranch),
    bioc_xgit = remote("bioc_xgit",
      mirror = x$RemoteMirror,
      repo = x$RemoteRepo,
      release = x$RemoteRelease,
      sha = x$RemoteSha,
      branch = x$RemoteBranch),
    stop(sprintf("can't convert package %s with RemoteType '%s' to remote", name, x$RemoteType))
  )
}

#' @export
format.remotes <- function(x, ...) {
  vapply(x, format, character(1))
}

# Contents of R/install-svn.R

#' Install a package from a SVN repository
#'
#' This function requires \command{svn} to be installed on your system in order to
#' be used.
#'
#' It is vectorised so you can install multiple packages with
#' a single command.
#'
#' @inheritParams install_git
#' @param subdir A sub-directory within a svn repository that contains the
#'   package we are interested in installing.
#' @param args A character vector providing extra options to pass on to
#'   \command{svn}.
#' @param revision svn revision, if omitted updates to latest
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams install_github
#' @family package installation
#' @export
#'
#' @examples
#' \dontrun{
#' install_svn("https://github.com/hadley/stringr/trunk")
#' install_svn("https://github.com/hadley/httr/branches/oauth")
#'}
install_svn <- function(url, subdir = NULL, args = character(0),
                        revision = NULL,
                        dependencies = NA,
                        upgrade = c("default", "ask", "always", "never"),
                        force = FALSE,
                        quiet = FALSE,
                        build = TRUE,
                        build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                        build_manual = FALSE, build_vignettes = FALSE,
                        repos = getOption("repos"),
                        type = getOption("pkgType"),
                        ...) {

  # One remote per URL (vectorised install).
  remotes <- lapply(url, svn_remote, svn_subdir = subdir,
    revision = revision, args = args)

  install_remotes(remotes, args = args,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Build an svn_remote object holding the URL, optional subdir, optional
# pinned revision and extra svn command-line arguments.
svn_remote <- function(url, svn_subdir = NULL, revision = NULL,
  args = character(0), ...) {
  remote("svn",
    url = url,
    svn_subdir = svn_subdir,
    revision = revision,
    args = args
  )
}

# Check out the repository (optionally at a fixed revision) into a temp
# directory; returns the checkout path.
#' @export
remote_download.svn_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading svn repo ", x$url)
  }

  bundle <- tempfile()
  svn_binary_path <- svn_path()
  url <- x$url

  args <- "co"
  if (!is.null(x$revision)) {
    args <- c(args, "-r", x$revision)
  }
  args <- c(args, x$args, full_svn_url(x), bundle)

  if (!quiet) {
    message(shQuote(svn_binary_path), " ", paste0(args, collapse = " "))
  }
  request <- system2(svn_binary_path, args, stdout = FALSE, stderr = FALSE)

  # This is only looking for an error code above 0-success
  if (request > 0) {
    stop("There seems to be a problem retrieving this SVN-URL.", call. = FALSE)
  }

  in_dir(bundle, {
    if (!is.null(x$revision)) {
      request <- system2(svn_binary_path, paste("update -r", x$revision), stdout = FALSE, stderr = FALSE)
      if (request > 0) {
        stop("There was a problem switching to the requested SVN revision", call. = FALSE)
      }
    }
  })

  bundle
}

# DESCRIPTION metadata fields recorded in the installed package; the SVN
# revision fills the RemoteSha slot.
#' @export
remote_metadata.svn_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  if (!is.null(bundle)) {
    in_dir(bundle, {
      revision <- svn_revision()
    })
  } else {
    revision <- sha
  }

  list(
    RemoteType = "svn",
    RemoteUrl = x$url,
    RemoteSubdir = x$svn_subdir,
    RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " "),
    RemoteSha = revision # for compatibility with other remotes
  )
}

# Locate the svn executable: explicit path, then PATH, then (on Windows)
# common install locations.
svn_path <- function(svn_binary_name = NULL) {
  # Use user supplied path
  if (!is.null(svn_binary_name)) {
    if (!file.exists(svn_binary_name)) {
      # FIX: was `.call = FALSE`, which stop() does not match to its `call.`
      # argument -- FALSE was silently pasted into the error message.
      stop("Path ", svn_binary_name, " does not exist", call. = FALSE)
    }
    return(svn_binary_name)
  }

  # Look on path
  svn_path <- Sys.which("svn")[[1]]
  if (svn_path != "") return(svn_path)

  # On Windows, look in common locations
  if (os_type() == "windows") {
    look_in <- c(
      "C:/Program Files/Svn/bin/svn.exe",
      "C:/Program Files (x86)/Svn/bin/svn.exe"
    )
    found <- file.exists(look_in)
    if (any(found)) return(look_in[found][1])
  }

  stop("SVN does not seem to be installed on your system.", call. = FALSE)
}

# Read the package name from the repository's DESCRIPTION via `svn cat`;
# returns NA if the file cannot be fetched.
#' @export
remote_package_name.svn_remote <- function(remote, ...) {
  description_url <- file.path(full_svn_url(remote), "DESCRIPTION")
  tmp_file <- tempfile()
  # FIX: was on.exit(rm(tmp_file)), which removed only the local variable and
  # leaked the temp file on disk; unlink() deletes the file itself.
  on.exit(unlink(tmp_file))
  response <- system2(svn_path(), paste("cat", description_url), stdout = tmp_file)
  if (!identical(response, 0L)) {
    return(NA_character_)
  }
  read_dcf(tmp_file)$Package
}

#' @export
remote_sha.svn_remote <- function(remote, ...) {
  svn_revision(full_svn_url(remote))
}

# Current revision number of `url` (or the working directory when url is
# NULL), extracted from `svn info --xml` output.
svn_revision <- function(url = NULL, svn_binary_path = svn_path()) {
  request <- system2(svn_binary_path, paste("info --xml", url), stdout = TRUE)
  if (!is.null(attr(request, "status")) && !identical(attr(request, "status"), 0L)) {
    stop("There was a problem retrieving the current SVN revision", call. = FALSE)
  }
  # FIX: the pattern here had been corrupted to ".*.*" with a dangling "\\1"
  # backreference (no capture group), which errors at runtime. Reconstructed
  # to capture the revision attribute of the <commit> element emitted by
  # `svn info --xml`.
  gsub(".*<commit[[:space:]]+revision=\"([0-9]+)\".*", "\\1",
    paste(collapse = "\n", request))
}

# Join the repository URL and optional subdirectory into the full target URL.
full_svn_url <- function(x) {
  url <- x$url
  if (!is.null(x$svn_subdir)) {
    url <- file.path(url, x$svn_subdir)
  }

  url
}

format.svn_remote <- function(x, ...) {
  "SVN"
}

# Contents of R/install-url.R

#' Install a package from a url
#'
#' This function is vectorised so you can install multiple packages in
#' a single command.
#'
#' @param url location of package on internet. The url should point to a
#'   zip file, a tar file or a bzipped/gzipped tar file.
#' @param subdir subdirectory within url bundle that contains the R package.
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams install_github
#' @export
#'
#' @family package installation
#' @examples
#' \dontrun{
#' install_url("https://github.com/hadley/stringr/archive/master.zip")
#' }
install_url <- function(url, subdir = NULL,
                        dependencies = NA,
                        upgrade = c("default", "ask", "always", "never"),
                        force = FALSE,
                        quiet = FALSE,
                        build = TRUE,
                        build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                        build_manual = FALSE, build_vignettes = FALSE,
                        repos = getOption("repos"),
                        type = getOption("pkgType"),
                        ...) {
  remotes <- lapply(url, url_remote, subdir = subdir)
  install_remotes(remotes,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)
}

# Build a url_remote object for a direct-download package archive.
url_remote <- function(url, subdir = NULL, ...) {
  remote("url",
    url = url,
    subdir = subdir
  )
}

#' @importFrom tools file_ext
#' @export
remote_download.url_remote <- function(x, quiet = FALSE) {
  if (!quiet) {
    message("Downloading package from url: ", x$url) # nocov
  }

  # file_ext() would return just "gz" for .tar.gz, so special-case it to keep
  # the full double extension on the temp file.
  ext <- if (grepl("\\.tar\\.gz$", x$url)) "tar.gz" else file_ext(x$url)

  bundle <- tempfile(fileext = paste0(".", ext))
  download(bundle, x$url)
}

#' @export
remote_metadata.url_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
  list(
    RemoteType = "url",
    RemoteUrl = x$url,
    RemoteSubdir = x$subdir
  )
}

# A bare URL carries no package name or version information.
#' @export
remote_package_name.url_remote <- function(remote, ...) {
  NA_character_
}

#' @export
remote_sha.url_remote <- function(remote, ...) {
  NA_character_
}

#' @export
format.url_remote <- function(x, ...) {
  "URL"
}

# Contents of R/install-version.R

#' Install specified version of a CRAN package.
#'
#' If you are installing an package that contains compiled code, you will
#' need to have an R development environment installed. You can check
#' if you do by running `devtools::has_devel` (you need the
#' `devtools` package for this).
#'
#' @export
#' @family package installation
#' @param package package name
#' @param version If the specified version is NULL or the same as the most
#'   recent version of the package, this function simply calls
#'   [utils::install.packages()]. Otherwise, it looks at the list of
#'   archived source tarballs and tries to install an older version instead.
#' @param ... Other arguments passed on to [utils::install.packages()].
#' @inheritParams utils::install.packages
#' @inheritParams install_github
#' @author Jeremy Stephens
#' @importFrom utils available.packages contrib.url install.packages
install_version <- function(package, version = NULL,
                            dependencies = NA,
                            upgrade = c("default", "ask", "always", "never"),
                            force = FALSE,
                            quiet = FALSE,
                            build = FALSE,
                            build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                            build_manual = FALSE, build_vignettes = FALSE,
                            repos = getOption("repos"),
                            type = "source",
                            ...) {

  # Archived versions only exist as source tarballs.
  if (!identical(type, "source")) {
    stop("`type` must be 'source' for `install_version()`", call. = FALSE)
  }

  url <- download_version_url(package, version, repos, type)

  res <- install_url(url,
    dependencies = dependencies,
    upgrade = upgrade,
    force = force,
    quiet = quiet,
    build = build,
    build_opts = build_opts,
    build_manual = build_manual,
    build_vignettes = build_vignettes,
    repos = repos,
    type = type,
    ...)

  lib <- list(...)$lib %||% .libPaths()

  # Remove Metadata from installed package
  add_metadata(
    system.file(package = package, lib.loc = lib),
    list(RemoteType = NULL, RemoteUrl = NULL, RemoteSubdir = NULL))

  invisible(res)
}

# Search each repository's archive index (Meta/archive.rds) for `package`;
# returns the archive info with a $repo column added, or errors if the
# package is in no repository.
package_find_repo <- function(package, repos) {
  for (repo in repos) {
    if (length(repos) > 1)
      message("Trying ", repo)

    archive <- tryCatch({
      con <- gzcon(url(sprintf("%s/src/contrib/Meta/archive.rds", repo), "rb"))
      on.exit(close(con))
      readRDS(con)
    },
      # Repos without an archive index are silently skipped.
      warning = function(e) list(),
      error = function(e) list())

    info <- archive[[package]]
    if (!is.null(info)) {
      info$repo <- repo
      return(info)
    }
  }

  stop(sprintf("couldn't find package '%s'", package))
}

#' Download a specified version of a CRAN package
#'
#' It downloads the package to a temporary file, and
#' returns the name of the file.
#'
#' @inheritParams install_version
#' @return Name of the downloaded file.
#'
#' @export
download_version <- function(package, version = NULL,
  repos = getOption("repos"), type = getOption("pkgType"), ...) {

  url <- download_version_url(package, version, repos, type)
  download(path = tempfile(), url = url)
}

# Resolve the download URL for `package` at `version`: the current tarball
# when the version matches (or is NULL), otherwise the CRAN-style archive
# path src/contrib/Archive/<pkg>/<pkg>_<version>.tar.gz.
download_version_url <- function(package, version, repos, type) {

  contriburl <- contrib.url(repos, type)
  available <- available.packages(contriburl)

  if (package %in% row.names(available)) {
    current.version <- available[package, 'Version']
    if (is.null(version) || version == current.version) {
      row <- available[which(rownames(available) == package)[1], ]
      return(paste0(
        row[["Repository"]],
        "/",
        row[["Package"]],
        "_",
        row[["Version"]],
        ".tar.gz"
      ))
    }
  }

  info <- package_find_repo(package, repos)

  if (is.null(version)) {
    # Grab the latest one: only happens if pulled from CRAN
    package.path <- row.names(info)[nrow(info)]
  } else {
    package.path <- paste(package, "/", package, "_", version, ".tar.gz", sep = "")
    if (!(package.path %in% row.names(info))) {
      stop(sprintf("version '%s' is invalid for package '%s'", version, package))
    }
  }

  paste(info$repo[1L], "/src/contrib/Archive/", package.path, sep = "")
}

# Contents of R/install.R

# Install a single unpacked source package directory: install its
# dependencies, optionally run R CMD build, then install the result.
install <- function(pkgdir, dependencies, quiet, build, build_opts, build_manual, build_vignettes,
                    upgrade, repos, type, ...) {

  warn_for_potential_errors()

  # Compiled code needs build tools; warn (or set them up via pkgbuild) first.
  if (file.exists(file.path(pkgdir, "src"))) {
    if (has_package("pkgbuild")) {
      pkgbuild::local_build_tools(required = TRUE)
    } else if (!has_devel()) {
      missing_devel_warning(pkgdir)
    }
  }

  pkg_name <- load_pkg_description(pkgdir)$package

  ## Check for circular dependencies. We need to know about the root
  ## of the install process.
  if (is_root_install()) on.exit(exit_from_root_install(), add = TRUE)
  if (check_for_circular_dependencies(pkgdir, quiet)) {
    return(invisible(pkg_name))
  }

  install_deps(pkgdir, dependencies = dependencies, quiet = quiet,
    build = build, build_opts = build_opts, build_manual = build_manual,
    build_vignettes = build_vignettes, upgrade = upgrade, repos = repos,
    type = type, ...)

  if (isTRUE(build)) {
    dir <- tempfile()
    dir.create(dir)
    on.exit(unlink(dir), add = TRUE)

    pkgdir <- safe_build_package(pkgdir, build_opts, build_manual, build_vignettes, dir, quiet)
  }

  safe_install_packages(
    pkgdir,
    repos = NULL,
    quiet = quiet,
    type = "source",
    ...
  )

  invisible(pkg_name)
}

# Run install.packages() (or crancache's wrapper) with library paths pinned
# via env vars and, optionally, warnings promoted to errors.
safe_install_packages <- function(...) {

  lib <- paste(.libPaths(), collapse = .Platform$path.sep)

  if (!is_standalone() &&
      has_package("crancache") && has_package("callr")) {
    i.p <- "crancache" %::% "install_packages"
  } else {
    i.p <- utils::install.packages
  }

  with_options(list(install.lock = getOption("install.lock", TRUE)), {
    with_envvar(
      c(R_LIBS = lib,
        R_LIBS_USER = lib,
        R_LIBS_SITE = lib,
        RGL_USE_NULL = "TRUE"),

      # Set options(warn = 2) for this process and child processes, so that
      # warnings from `install.packages()` are converted to errors.
      if (should_error_for_warnings()) {
        with_options(list(warn = 2),
          with_rprofile_user("options(warn = 2)",
            i.p(...)
          )
        )
      } else {
        i.p(...)
      }
    )
  })
}

# Reconcile the build_opts vector with the build_manual/build_vignettes
# flags: the flags win, adding or removing the corresponding --no-* option.
normalize_build_opts <- function(build_opts, build_manual, build_vignettes) {
  if (!isTRUE(build_manual)) {
    build_opts <- union(build_opts, "--no-manual")
  } else {
    build_opts <- setdiff(build_opts, "--no-manual")
  }

  if (!isTRUE(build_vignettes)) {
    build_opts <- union(build_opts, "--no-build-vignettes")
  } else {
    build_opts <- setdiff(build_opts, "--no-build-vignettes")
  }

  build_opts
}

# Build the source tarball for pkgdir into dest_path, via pkgbuild when
# available, otherwise by shelling out to `R CMD build`. Returns the path of
# the built tarball.
safe_build_package <- function(pkgdir, build_opts, build_manual, build_vignettes, dest_path, quiet, use_pkgbuild = !is_standalone() && pkg_installed("pkgbuild")) {
  build_opts <- normalize_build_opts(build_opts, build_manual, build_vignettes)
  if (use_pkgbuild) {
    # pkgbuild takes vignettes/manual as logical arguments, so translate the
    # --no-* options and strip them from the remaining args.
    vignettes <- TRUE
    manual <- FALSE
    has_no_vignettes <- grepl("--no-build-vignettes", build_opts)
    if (any(has_no_vignettes)) {
      vignettes <- FALSE
    }
    has_no_manual <- grepl("--no-manual", build_opts)
    if (!any(has_no_manual)) {
      manual <- TRUE
    }
    build_opts <- build_opts[!(has_no_vignettes | has_no_manual)]
    pkgbuild::build(pkgdir, dest_path = dest_path, binary = FALSE,
      vignettes = vignettes, manual = manual, args = build_opts, quiet = quiet)
  } else {
    # No pkgbuild, so we need to call R CMD build ourselves
    lib <- paste(.libPaths(), collapse = .Platform$path.sep)
    env <- c(R_LIBS = lib,
      R_LIBS_USER = lib,
      R_LIBS_SITE = lib,
      R_PROFILE_USER = tempfile())

    pkgdir <- normalizePath(pkgdir)

    message("Running `R CMD build`...")
    in_dir(dest_path, {
      with_envvar(env, {
        output <- rcmd("build", c(build_opts, shQuote(pkgdir)), quiet = quiet,
          fail_on_status = FALSE)
      })
    })

    if (output$status != 0) {
      cat("STDOUT:\n")
      cat(output$stdout, sep = "\n")
      cat("STDERR:\n")
      cat(output$stderr, sep = "\n")
      msg_for_long_paths(output)
      stop(sprintf("Failed to `R CMD build` package, try `build = FALSE`."),
        call. = FALSE)
    }

    # The tarball name is parsed out of the final "* building '<file>'" line.
    building_regex <- paste0(
      "^[*] building[^[:alnum:]]+",  # prefix, "* building '"
      "([-[:alnum:]_.]+)",           # package file name, e.g. xy_1.0-2.tar.gz
      "[^[:alnum:]]+$"               # trailing quote
    )

    pkgfile <- sub(building_regex, "\\1", output$stdout[length(output$stdout)])
    file.path(dest_path, pkgfile)
  }
}

# Explain the well-known Windows long-path build failure (r-lib/remotes#84).
msg_for_long_paths <- function(output) {
  if (sys_type() == "windows" &&
      (r_error_matches("over-long path", output$stderr) ||
       r_error_matches("over-long path length", output$stderr))) {
    message(
      "\nIt seems that this package contains files with very long paths.\n",
      "This is not supported on most Windows versions. Please contact the\n",
      "package authors and tell them about this. See this GitHub issue\n",
      "for more details: https://github.com/r-lib/remotes/issues/84\n")
  }
}

# Match an R error message in `str`, also trying its translated form so the
# check works in non-English locales.
r_error_matches <- function(msg, str) {
  any(grepl(msg, str)) ||
    any(grepl(gettext(msg, domain = "R"), str))
}

#' Install package dependencies if needed.
#'
#' @inheritParams package_deps
#' @param ... additional arguments passed to [utils::install.packages()].
#' @param build If `TRUE` build the package before installing.
#' @param build_opts Options to pass to `R CMD build`, only used when `build`
#' @param build_manual If `FALSE`, don't build PDF manual ('--no-manual').
#' @param build_vignettes If `FALSE`, don't build package vignettes ('--no-build-vignettes'). #' is `TRUE`. #' @export #' @examples #' \dontrun{install_deps(".")} install_deps <- function(pkgdir = ".", dependencies = NA, repos = getOption("repos"), type = getOption("pkgType"), upgrade = c("default", "ask", "always", "never"), quiet = FALSE, build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"), build_manual = FALSE, build_vignettes = FALSE, ...) { packages <- dev_package_deps( pkgdir, repos = repos, dependencies = dependencies, type = type ) dep_deps <- if (isTRUE(dependencies)) NA else dependencies update( packages, dependencies = dep_deps, quiet = quiet, upgrade = upgrade, build = build, build_opts = build_opts, build_manual = build_manual, build_vignettes = build_vignettes, type = type, ... ) } should_error_for_warnings <- function() { force_suggests <- Sys.getenv("_R_CHECK_FORCE_SUGGESTS_", "true") no_errors <- Sys.getenv("R_REMOTES_NO_ERRORS_FROM_WARNINGS", !config_val_to_logical(force_suggests)) !config_val_to_logical(no_errors) } # Contents of R/json.R # Standalone JSON parser # # The purpose of this file is to provide a standalone JSON parser. # It is quite slow and bare. If you need a proper parser please use the # jsonlite package. # # The canonical location of this file is in the remotes package: # https://github.com/r-lib/remotes/blob/master/R/json.R # # API: # parse(text) # parse_file(filename) # # NEWS: # - 2019/05/15 First standalone version json <- local({ tokenize_json <- function(text) { text <- paste(text, collapse = "\n") ESCAPE <- '(\\\\[^u[:cntrl:]]|\\\\u[0-9a-fA-F]{4})' CHAR <- '[^[:cntrl:]"\\\\]' STRING <- paste0('"', CHAR, '*(', ESCAPE, CHAR, '*)*"') NUMBER <- "-?(0|[1-9][0-9]*)([.][0-9]*)?([eE][+-]?[0-9]*)?" KEYWORD <- 'null|false|true' SPACE <- '[[:space:]]+' match <- gregexpr( pattern = paste0( STRING, "|", NUMBER, "|", KEYWORD, "|", SPACE, "|", "." 
), text = text, perl = TRUE ) grep("^\\s+$", regmatches(text, match)[[1]], value = TRUE, invert = TRUE) } throw <- function(...) { stop("JSON: ", ..., call. = FALSE) } # Parse a JSON file # # @param filename Path to the JSON file. # @return R objects corresponding to the JSON file. parse_file <- function(filename) { parse(readLines(filename, warn = FALSE)) } # Parse a JSON string # # @param text JSON string. # @return R object corresponding to the JSON string. parse <- function(text) { tokens <- tokenize_json(text) token <- NULL ptr <- 1 read_token <- function() { if (ptr <= length(tokens)) { token <<- tokens[ptr] ptr <<- ptr + 1 } else { token <<- 'EOF' } } parse_value <- function(name = "") { if (token == "{") { parse_object() } else if (token == "[") { parse_array() } else if (token == "EOF" || (nchar(token) == 1 && ! token %in% 0:9)) { throw("EXPECTED value GOT ", token) } else { j2r(token) } } parse_object <- function() { res <- structure(list(), names = character()) read_token() ## Invariant: we are at the beginning of an element while (token != "}") { ## "key" if (grepl('^".*"$', token)) { key <- j2r(token) } else { throw("EXPECTED string GOT ", token) } ## : read_token() if (token != ":") { throw("EXPECTED : GOT ", token) } ## value read_token() res[key] <- list(parse_value()) ## } or , read_token() if (token == "}") { break } else if (token != ",") { throw("EXPECTED , or } GOT ", token) } read_token() } res } parse_array <- function() { res <- list() read_token() ## Invariant: we are at the beginning of an element while (token != "]") { ## value res <- c(res, list(parse_value())) ## ] or , read_token() if (token == "]") { break } else if (token != ",") { throw("EXPECTED , GOT ", token) } read_token() } res } read_token() parse_value(tokens) } j2r <- function(token) { if (token == "null") { NULL } else if (token == "true") { TRUE } else if (token == "false") { FALSE } else if (grepl('^".*"$', token)) { trimq(token) } else { as.numeric(token) } } trimq <- 
function(x) {
    sub('^"(.*)"$', "\\1", x)
  }

  # Export the parser API; .internal gives access to the local environment.
  structure(
    list(
      .internal = environment(),
      parse = parse,
      parse_file = parse_file
    ),
    class = c("standalone_json", "standalone"))
})

# Contents of R/package-deps.R

# Parse a DESCRIPTION-style dependency field (e.g. "Depends:") into a
# data frame with columns name, compare and version. The implicit "R"
# dependency is dropped. Returns NULL for NULL or whitespace-only input.
parse_deps <- function(string) {
  if (is.null(string)) return()
  stopifnot(is.character(string), length(string) == 1)
  if (grepl("^\\s*$", string)) return()

  # Split by commas with surrounding whitespace removed
  pieces <- strsplit(string, "[[:space:]]*,[[:space:]]*")[[1]]

  # Get the names
  names <- gsub("\\s*\\(.*?\\)", "", pieces)
  names <- gsub("^\\s+|\\s+$", "", names)

  # Get the versions and comparison operators
  versions_str <- pieces
  have_version <- grepl("\\(.*\\)", versions_str)
  versions_str[!have_version] <- NA

  compare <- sub(".*\\(\\s*(\\S+)\\s+.*\\s*\\).*", "\\1", versions_str)
  versions <- sub(".*\\(\\s*\\S+\\s+(\\S*)\\s*\\).*", "\\1", versions_str)

  # Check that non-NA comparison operators are valid
  compare_nna <- compare[!is.na(compare)]
  compare_valid <- compare_nna %in% c(">", ">=", "==", "<=", "<")
  if (!all(compare_valid)) {
    stop("Invalid comparison operator in dependency: ",
      paste(compare_nna[!compare_valid], collapse = ", "))
  }

  deps <- data.frame(name = names, compare = compare,
    version = versions, stringsAsFactors = FALSE)

  # Remove R dependency
  deps[names != "R", ]
}

# Contents of R/package.R

# Read the DESCRIPTION of a package directory or source tarball into a
# named list (field names lower-cased), with the original `path` added.
load_pkg_description <- function(path) {
  path <- normalizePath(path)

  if (!is_dir(path)) {
    # A tarball: extract just the DESCRIPTION into a temp dir.
    dir <- tempfile()
    path_desc <- untar_description(path, dir = dir)
    on.exit(unlink(dir, recursive = TRUE))
  } else {
    path_desc <- file.path(path, "DESCRIPTION")
  }

  desc <- read_dcf(path_desc)
  names(desc) <- tolower(names(desc))
  desc$path <- path

  desc
}

# Contents of R/parse-git.R

#' Parse a remote git repo specification
#'
#' A remote repo can be specified in two ways:
#' \describe{
#' \item{as a URL}{`parse_github_url()` handles HTTPS and SSH remote URLs
#' and various GitHub browser URLs}
#' \item{via a shorthand}{`parse_repo_spec()` handles this concise form:
#' `[username/]repo[/subdir][#pull|@ref|@*release]`}
#' }
#'
#' @param repo Character scalar, the repo specification.
#' @return List with members: `username`, `repo`, `subdir`
#' `ref`, `pull`, `release`, some which will be empty.
#'
#' @name parse-git-repo
#' @examples
#' parse_repo_spec("metacran/crandb")
#' parse_repo_spec("jimhester/covr#47") ## pull request
#' parse_repo_spec("jeroen/curl@v0.9.3") ## specific tag
#' parse_repo_spec("tidyverse/dplyr@*release") ## shorthand for latest release
#' parse_repo_spec("r-lib/remotes@550a3c7d3f9e1493a2ba") ## commit SHA
#' parse_repo_spec("igraph=igraph/rigraph") ## Different package name from repo name
#'
#' parse_github_url("https://github.com/jeroen/curl.git")
#' parse_github_url("git@github.com:metacran/crandb.git")
#' parse_github_url("https://github.com/jimhester/covr")
#' parse_github_url("https://github.example.com/user/repo.git")
#' parse_github_url("git@github.example.com:user/repo.git")
#'
#' parse_github_url("https://github.com/r-lib/remotes/pull/108")
#' parse_github_url("https://github.com/r-lib/remotes/tree/name-of-branch")
#' parse_github_url("https://github.com/r-lib/remotes/commit/1234567")
#' parse_github_url("https://github.com/r-lib/remotes/releases/latest")
#' parse_github_url("https://github.com/r-lib/remotes/releases/tag/1.0.0")
NULL

#' @export
#' @rdname parse-git-repo
parse_repo_spec <- function(repo) {
  # FIX(review): the PCRE named capture groups (e.g. `(?<package>...)`)
  # had been stripped ("(?[" is invalid PCRE and would make regexpr()
  # error); they are restored here because re_match() names its result
  # columns after `capture.names`, and callers read params$username etc.
  package_name_rx <- "(?:(?<package>[[:alpha:]][[:alnum:].]*[[:alnum:]])=)?"
  username_rx <- "(?:(?<username>[^/]+)/)"
  repo_rx <- "(?<repo>[^/@#]+)"
  subdir_rx <- "(?:/(?<subdir>[^@#]*[^@#/])/?)?"
ref_rx <- "(?:@(?[^*].*))" pull_rx <- "(?:#(?[0-9]+))" release_rx <- "(?:@(?[*]release))" ref_or_pull_or_release_rx <- sprintf( "(?:%s|%s|%s)?", ref_rx, pull_rx, release_rx ) spec_rx <- sprintf( "^%s%s%s%s%s$", package_name_rx, username_rx, repo_rx, subdir_rx, ref_or_pull_or_release_rx ) params <- as.list(re_match(text = repo, pattern = spec_rx)) if (is.na(params$.match)) { stop(sprintf("Invalid git repo specification: '%s'", repo)) } params[grepl("^[^\\.]", names(params))] } #' @export #' @rdname parse-git-repo parse_github_repo_spec <- parse_repo_spec #' @export #' @rdname parse-git-repo parse_github_url <- function(repo) { prefix_rx <- "(?:github[^/:]+[/:])" username_rx <- "(?:(?[^/]+)/)" repo_rx <- "(?[^/@#]+)" ref_rx <- "(?:(?:tree|commit|releases/tag)/(?.+$))" pull_rx <- "(?:pull/(?.+$))" release_rx <- "(?:releases/)(?.+$)" ref_or_pull_or_release_rx <- sprintf( "(?:/(%s|%s|%s))?", ref_rx, pull_rx, release_rx ) url_rx <- sprintf( "%s%s%s%s", prefix_rx, username_rx, repo_rx, ref_or_pull_or_release_rx ) params <- as.list(re_match(text = repo, pattern = url_rx)) if (is.na(params$.match)) { stop(sprintf("Invalid GitHub URL: '%s'", repo)) } if (params$ref == "" && params$pull == "" && params$release == "") { params$repo <- gsub("\\.git$", "", params$repo) } if (params$release == "latest") { params$release <- "*release" } params[grepl("^[^\\.]", names(params))] } parse_git_repo <- function(repo) { if (grepl("^https://github|^git@github", repo)) { params <- parse_github_url(repo) } else { params <- parse_repo_spec(repo) } params <- params[viapply(params, nchar) > 0] if (!is.null(params$pull)) { params$ref <- github_pull(params$pull) params$pull <- NULL } if (!is.null(params$release)) { params$ref <- github_release() params$release <- NULL } params } # Contents of R/submodule.R parse_submodules <- function(file) { if (grepl("\n", file)) { x <- strsplit(file, "\n")[[1]] } else { x <- readLines(file) } # https://git-scm.com/docs/git-config#_syntax # Subsection names are 
case sensitive and can contain any characters except # newline and the null byte. Doublequote " and backslash can be included by # escaping them as \" and \\ double_quoted_string_with_escapes <- '(?:\\\\.|[^"])*' # Otherwise extract section names section_names <- re_match( x, sprintf('^[[:space:]]*\\[submodule "(?%s)"\\][[:space:]]*$', double_quoted_string_with_escapes) )$submodule # If no sections found return the empty list if (all(is.na(section_names))) { return(list()) } # Extract name = value # The variable names are case-insensitive, allow only alphanumeric characters # and -, and must start with an alphabetic character. variable_name <- "[[:alpha:]][[:alnum:]-]*" mapping_values <- re_match( x, sprintf('^[[:space:]]*(?%s)[[:space:]]*=[[:space:]]*(?.*)[[:space:]]*$', variable_name), ) values <- cbind(submodule = fill(section_names), mapping_values[c("name", "value")], stringsAsFactors = FALSE) values <- values[!is.na(mapping_values$.match), ] # path and valid url are required if (!all(c("path", "url") %in% values$name)) { warning("Invalid submodule definition, skipping submodule installation", immediate. = TRUE, call. = FALSE) return(list()) } # Roughly equivalent to tidyr::spread(values, name, value) res <- stats::reshape(values, idvar = "submodule", timevar = "name", v.name = "value", direction = "wide") # Set the column names, reshape prepends `value.` to path, url and branch colnames(res) <- gsub("value[.]", "", colnames(res)) # path and valid url are required if (any(is.na(res$url), is.na(res$path))) { warning("Invalid submodule definition, skipping submodule installation", immediate. = TRUE, call. 
      = FALSE)
    return(list())
  }

  # branch is optional
  if (!exists("branch", res)) {
    res$branch <- NA_character_
  }

  # Remove unneeded attribute
  attr(res, "reshapeWide") <- NULL

  # Remove rownames
  rownames(res) <- NULL

  res
}

# Adapted from https://stackoverflow.com/a/9517731/2055486
# Carry the last non-NA value forward over subsequent NA positions.
fill <- function(x) {
  not_missing <- !is.na(x)

  res <- x[not_missing]
  res[cumsum(not_missing)]
}

# Shallow-clone a single submodule from `url` into `path`; `branch` is
# passed to git only when it is a non-NA scalar.
update_submodule <- function(url, path, branch, quiet) {
  args <- c('clone', '--depth', '1', '--no-hardlinks --recurse-submodules')
  if (length(branch) > 0 && !is.na(branch)) {
    args <- c(args, "--branch", branch)
  }
  args <- c(args, url, path)
  git(paste0(args, collapse = " "), quiet = quiet)
}

# Clone every submodule listed in `source`'s .gitmodules (looking in the
# package root above `subdir` when the file is not found directly),
# skipping entries excluded by .Rbuildignore.
update_submodules <- function(source, subdir, quiet) {
  file <- file.path(source, ".gitmodules")

  if (!file.exists(file)) {

    # When installing from a subdir, walk up to where .gitmodules lives.
    if (!is.null(subdir)) {
      nb_sub_folders <- lengths(strsplit(subdir, "/"))
      source <- do.call(file.path, as.list(c(source, rep("..", nb_sub_folders))))
    }

    file <- file.path(source, ".gitmodules")
    if (!file.exists(file)) {
      return()
    }
  }

  info <- parse_submodules(file)

  # Fixes #234
  if (length(info) == 0) {
    return()
  }

  to_ignore <- in_r_build_ignore(info$path, file.path(source, ".Rbuildignore"))

  # NOTE(review): this second emptiness check looks redundant — `info`
  # was already verified to be non-empty just above.
  if (!(length(info) > 0)) {
    return()
  }

  info <- info[!to_ignore, ]

  for (i in seq_len(NROW(info))) {
    update_submodule(info$url[[i]], file.path(source, info$path[[i]]), info$branch[[i]], quiet)
  }
}

# Contents of R/system.R

# Run an external command, capturing stdout/stderr via temp files.
# Returns list(stdout, stderr, status); if `error` is TRUE, a non-zero
# exit status raises an error (see continuation below).
system_check <- function(command, args = character(), quiet = TRUE,
                         error = TRUE, path = ".") {

  out <- tempfile()
  err <- tempfile()
  on.exit(unlink(out), add = TRUE)
  on.exit(unlink(err), add = TRUE)

  ## We suppress warnings, they are given if the command
  ## exits with a non-zero status
  res <- in_dir(
    path,
    suppressWarnings(
      system2(command, args = args, stdout = out, stderr = err)
    )
  )

  res <- list(
    stdout = tryCatch(
      suppressWarnings(win2unix(read_char(out))),
      error = function(e) ""
    ),
    stderr = tryCatch(
      suppressWarnings(win2unix(read_char(err))),
      error = function(e) ""
    ),
    status = res
  )

  if (error &&
      res$status != 0) {
    stop("Command ", command, " failed ", res$stderr)
  }

  if (! quiet) {
    if (! identical(res$stdout, NA_character_)) cat(res$stdout)
    if (! identical(res$stderr, NA_character_)) cat(res$stderr)
  }

  res
}

# Convert Windows CRLF line endings to Unix LF.
win2unix <- function(str) {
  gsub("\r\n", "\n", str, fixed = TRUE)
}

# Read a whole file into a single character scalar.
read_char <- function(path, ...) {
  readChar(path, nchars = file.info(path)$size, ...)
}

# Contents of R/utils.R

# Null-default operator: `a` unless it is NULL, in which case `b`.
`%||%` <- function (a, b) if (!is.null(a)) a else b

# Fetch object `f` from package `p`'s namespace (also unexported objects).
`%:::%` <- function (p, f) get(f, envir = asNamespace(p))

# Same lookup; by convention used for exported objects.
`%::%` <- function (p, f) get(f, envir = asNamespace(p))

# vapply shorthand returning an integer vector.
viapply <- function(X, FUN, ..., USE.NAMES = TRUE) {
  vapply(X, FUN, integer(1L), ..., USE.NAMES = USE.NAMES)
}

# vapply shorthand returning a logical vector.
vlapply <- function(X, FUN, ..., USE.NAMES = TRUE) {
  vapply(X, FUN, logical(1L), ..., USE.NAMES = USE.NAMES)
}

# Run `R CMD <cmd>` (Rcmd.exe on Windows), capturing stdout/stderr.
# Returns list(stdout, stderr, status); with `fail_on_status` a non-zero
# exit dumps the captured output and raises an error.
rcmd <- function(cmd, args, path = R.home("bin"), quiet, fail_on_status = TRUE) {
  if (os_type() == "windows") {
    real_cmd <- file.path(path, "Rcmd.exe")
    args <- c(cmd, args)
  } else {
    real_cmd <- file.path(path, "R")
    args <- c("CMD", cmd, args)
  }

  stdoutfile <- tempfile()
  stderrfile <- tempfile()
  on.exit(unlink(c(stdoutfile, stderrfile), recursive = TRUE), add = TRUE)

  status <- system2(real_cmd, args, stderr = stderrfile, stdout = stdoutfile)
  out <- tryCatch(readLines(stdoutfile, warn = FALSE), error = function(x) "")
  err <- tryCatch(readLines(stderrfile, warn = FALSE), error = function(x) "")

  if (fail_on_status && status != 0) {
    cat("STDOUT:\n")
    cat(out, sep = "\n")
    cat("STDERR:\n")
    cat(err, sep = "\n")
    stop(sprintf("Error running '%s' (status '%i')", cmd, status), call.
      = FALSE)
  }

  if (!quiet) {
    cat(out, sep = "\n")
  }

  list(stdout = out, stderr = err, status = status)
}

# A package counts as a Bioconductor package if its DESCRIPTION carries a
# `biocviews` field.
is_bioconductor <- function(x) {
  !is.null(x$biocviews)
}

# Strip leading and trailing whitespace.
trim_ws <- function(x) {
  gsub("^[[:space:]]+|[[:space:]]+$", "", x)
}

# Set the given environment variables (NA values unset the variable).
# Returns the previous values invisibly so they can be restored later.
set_envvar <- function(envs) {
  if (length(envs) == 0) return()

  stopifnot(is.named(envs))

  old <- Sys.getenv(names(envs), names = TRUE, unset = NA)
  set <- !is.na(envs)

  both_set <- set & !is.na(old)

  if (any(set)) do.call("Sys.setenv", as.list(envs[set]))
  if (any(!set)) Sys.unsetenv(names(envs)[!set])

  invisible(old)
}

# Evaluate `code` with the environment variables in `new` temporarily set;
# the previous values are restored on exit.
with_envvar <- function(new, code) {
  old <- set_envvar(new)
  on.exit(set_envvar(old))
  force(code)
}

# TRUE if every element of `x` has a non-empty name.
is.named <- function(x) {
  !is.null(names(x)) && all(names(x) != "")
}

# Check whether `pkg` is installed by trying to load its namespace; the
# namespace is unloaded again if it was not already loaded.
pkg_installed <- function(pkg) {

  if (pkg %in% loadedNamespaces()) {
    TRUE
  } else if (requireNamespace(pkg, quietly = TRUE)) {
    try(unloadNamespace(pkg))
    TRUE
  } else {
    FALSE
  }
}

# Like pkg_installed(), but leaves the namespace loaded.
has_package <- function(pkg) {
  if (pkg %in% loadedNamespaces()) {
    TRUE
  } else {
    requireNamespace(pkg, quietly = TRUE)
  }
}

# Build a `with_*(new, code)` helper from a setter (and an optional
# distinct resetter): the old state is captured and restored on exit.
with_something <- function(set, reset = set) {
  function(new, code) {
    old <- set(new)
    on.exit(reset(old))
    force(code)
  }
}

# Evaluate code with a temporary working directory.
in_dir <- with_something(setwd)

# Current R version as "major.minor".
get_r_version <- function() {
  paste(R.version$major, sep = ".", R.version$minor)
}

set_options <- function(x) {
  do.call(options, as.list(x))
}

# Evaluate code with temporarily changed options().
with_options <- with_something(set_options)

# Read the current user .Rprofile. Here is the order it is searched, from
# ?Startup
#
# 'R_PROFILE_USER’ environment variable (and tilde expansion
# will be performed). If this is unset, a file called ‘.Rprofile’
# is searched for in the current directory or in the user's home
# directory (in that order). The user profile file is sourced into
# the workspace.
read_rprofile_user <- function() {
  f <- normalizePath(Sys.getenv("R_PROFILE_USER", ""), mustWork = FALSE)
  if (file.exists(f)) {
    return(readLines(f))
  }

  f <- normalizePath("~/.Rprofile", mustWork = FALSE)
  if (file.exists(f)) {
    return(readLines(f))
  }

  # No user profile found.
  character()
}

# Evaluate `code` with extra lines appended to the user's .Rprofile,
# using a temporary file pointed to by R_PROFILE_USER.
with_rprofile_user <- function(new, code) {
  temp_rprofile <- tempfile()
  on.exit(unlink(temp_rprofile), add = TRUE)

  writeLines(c(read_rprofile_user(), new), temp_rprofile)
  with_envvar(c("R_PROFILE_USER" = temp_rprofile), {
    force(code)
  })
}

## There are two kinds of tar on windows, one needs --force-local
## not to interpret : characters, the other does not. We try both ways.
untar <- function(tarfile, ...) {
  if (os_type() == "windows") {
    tarhelp <- tryCatch(
      system2("tar", "--help", stdout = TRUE, stderr = TRUE),
      error = function(x) "")

    if (any(grepl("--force-local", tarhelp))) {
      status <- try(
        suppressWarnings(utils::untar(tarfile, extras = "--force-local", ...)),
        silent = TRUE)

      if (! is_tar_error(status)) {
        return(status)
      } else {
        message("External tar failed with `--force-local`, trying without")
      }
    }
  }

  # Fall back to plain utils::untar().
  utils::untar(tarfile, ...)
}

# A tar attempt failed if it threw, or carried a non-zero status.
is_tar_error <- function(status) {
  inherits(status, "try-error") ||
    is_error_status(status) ||
    is_error_status(attr(status, "status"))
}

# TRUE for a non-NA numeric scalar/vector whose value is non-zero.
is_error_status <- function(x) {
  is.numeric(x) &&
    length(x) > 0 &&
    !is.na(x) &&
    x != 0
}

os_type <- function() {
  .Platform$OS.type
}

# One of "windows", "macos", "linux" or "unix"; errors otherwise.
sys_type <- function() {
  if (.Platform$OS.type == "windows") {
    "windows"
  } else if (Sys.info()["sysname"] == "Darwin") {
    "macos"
  } else if (Sys.info()["sysname"] == "Linux") {
    "linux"
  } else if (.Platform$OS.type == "unix") {
    "unix"
  } else {
    stop("Unknown OS")
  }
}

# NOTE(review): returns NA (not FALSE) for a non-existent path.
is_dir <- function(path) {
  file.info(path)$isdir
}

# Extract only the top-level DESCRIPTION file from a package tarball and
# return its path; errors when the tarball has none.
untar_description <- function(tarball, dir = tempfile()) {
  files <- untar(tarball, list = TRUE)
  desc <- grep("^[^/]+/DESCRIPTION$", files, value = TRUE)
  if (length(desc) < 1) stop("No 'DESCRIPTION' file in package")
  untar(tarball, desc, exdir = dir)
  file.path(dir, desc)
}

## copied from rematch2@180fb61

# Match `pattern` against `text` and return a data frame with one column
# per named capture group plus `.text` and `.match` (see continuation).
re_match <- function(text, pattern, perl = TRUE, ...) {

  stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern))
  text <- as.character(text)

  match <- regexpr(pattern, text, perl = perl, ...)
  start <- as.vector(match)
  length <- attr(match, "match.length")
  end <- start + length - 1L

  matchstr <- substring(text, start, end)
  # Non-matching inputs get NA instead of an empty string.
  matchstr[ start == -1 ] <- NA_character_

  res <- data.frame(
    stringsAsFactors = FALSE,
    .text = text,
    .match = matchstr
  )

  if (!is.null(attr(match, "capture.start"))) {

    gstart <- attr(match, "capture.start")
    glength <- attr(match, "capture.length")
    gend <- gstart + glength - 1L

    groupstr <- substring(text, gstart, gend)
    groupstr[ gstart == -1 ] <- NA_character_
    dim(groupstr) <- dim(gstart)

    res <- cbind(groupstr, res, stringsAsFactors = FALSE)
  }

  # Column names come from the PCRE named capture groups.
  names(res) <- c(attr(match, "capture.names"), ".text", ".match")
  class(res) <- c("tbl_df", "tbl", class(res))
  res
}

# TRUE when running in standalone mode (R_REMOTES_STANDALONE env var).
is_standalone <- function() {
  isTRUE(config_val_to_logical(Sys.getenv("R_REMOTES_STANDALONE", "false")))
}

# This code is adapted from the perl MIME::Base64 module https://perldoc.perl.org/MIME/Base64.html
# https://github.com/gisle/mime-base64/blob/cf23d49e517c6ed8f4b24295f63721e8c9935010/Base64.xs#L197

XX <- 255L   # sentinel: byte is not a valid base64 character
EQ <- 254L   # sentinel: '=' padding character
INVALID <- XX

# Lookup table mapping a raw byte value (+1, for R's 1-based indexing) to
# its 6-bit base64 value, or to the XX/EQ sentinels.
index_64 <- as.integer(c(
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,62, XX,XX,XX,63,
  52,53,54,55, 56,57,58,59, 60,61,XX,XX, XX,EQ,XX,XX,
  XX, 0, 1, 2,  3, 4, 5, 6,  7, 8, 9,10, 11,12,13,14,
  15,16,17,18, 19,20,21,22, 23,24,25,XX, XX,XX,XX,XX,
  XX,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
  41,42,43,44, 45,46,47,48, 49,50,51,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
  XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX
))

# Decode a base64 string (or raw vector) into the decoded text.
base64_decode <- function(x) {
  if (is.character(x)) {
    x <- charToRaw(x)
  }

  len <- length(x)
  idx <- 1

  c <-
    integer(4)
  out <- raw()

  while(idx <= len) {
    i <- 1
    # Collect the next 4 valid base64 characters, skipping invalid bytes.
    while(i <= 4) {
      uc <- index_64[[as.integer(x[[idx]]) + 1L]]
      idx <- idx + 1
      if (uc != INVALID) {
        c[[i]] <- uc
        i <- i + 1
      }
      if (idx > len) {
        if (i <= 4) {
          if (i <= 2) return(rawToChar(out))
          if (i == 3) {
            # Treat a truncated final quantum as padded.
            c[[3]] <- EQ
            c[[4]] <- EQ
          }
          break
        }
      }
    }

    if (c[[1]] == EQ || c[[2]] == EQ) {
      break
    }

    #print(sprintf("c1=%d,c2=%d,c3=%d,c4=%d\n", c[1],c[2],c[3],c[4]))

    # First output byte: 6 bits of c1 + top 2 bits of c2.
    out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(c[[1]], 2L), bitwShiftR(bitwAnd(c[[2]], 0x30), 4L)))

    if (c[[3]] == EQ) {
      break
    }

    # Second output byte: low 4 bits of c2 + top 4 bits of c3.
    out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(bitwAnd(c[[2]], 0x0F), 4L), bitwShiftR(bitwAnd(c[[3]], 0x3C), 2L)))

    if (c[[4]] == EQ) {
      break
    }

    # Third output byte: low 2 bits of c3 + 6 bits of c4.
    out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(bitwAnd(c[[3]], 0x03), 6L), c[[4]]))
  }

  rawToChar(out)
}

# The 64-character base64 alphabet as raw bytes.
basis64 <- charToRaw(paste(c(LETTERS, letters, 0:9, "+", "/"), collapse = ""))

# Encode a string (or raw vector) as base64 with '=' padding.
base64_encode <- function(x) {
  if (is.character(x)) {
    x <- charToRaw(x)
  }

  len <- length(x)
  # Output is 4 characters per (up to) 3 input bytes.
  rlen <- floor((len + 2L) / 3L) * 4L
  out <- raw(rlen)
  ip <- op <- 1L

  c <- integer(4)

  while (len > 0L) {
    c[[1]] <- as.integer(x[[ip]])
    ip <- ip + 1L
    if (len > 1L) {
      c[[2]] <- as.integer(x[ip])
      ip <- ip + 1L
    } else {
      c[[2]] <- 0L
    }

    out[op] <- basis64[1 + bitwShiftR(c[[1]], 2L)]
    op <- op + 1L
    out[op] <- basis64[1 + bitwOr(bitwShiftL(bitwAnd(c[[1]], 3L), 4L), bitwShiftR(bitwAnd(c[[2]], 240L), 4L))]
    op <- op + 1L

    if (len > 2) {
      c[[3]] <- as.integer(x[ip])
      ip <- ip + 1L
      out[op] <- basis64[1 + bitwOr(bitwShiftL(bitwAnd(c[[2]], 15L), 2L), bitwShiftR(bitwAnd(c[[3]], 192L), 6L))]
      op <- op + 1L
      out[op] <- basis64[1 + bitwAnd(c[[3]], 63)]
      op <- op + 1L
    } else if (len == 2) {
      # Two input bytes: one '=' pad.
      out[op] <- basis64[1 + bitwShiftL(bitwAnd(c[[2]], 15L), 2L)]
      op <- op + 1L
      out[op] <- charToRaw("=")
      op <- op + 1L
    } else {
      ## len == 1
      out[op] <- charToRaw("=")
      op <- op + 1L
      out[op] <- charToRaw("=")
      op <- op + 1L
    }
    len <- len - 3L
  }

  rawToChar(out)
}

# Build a download URL from a host plus path components.
build_url <- function(host, ...)
{
  download_url(do.call(file.path, as.list(c(host, ...))))
}

# Prefix a bare URL with https:// (or http:// when no secure download
# method is available); URLs already carrying a scheme pass through.
download_url <- function(url) {
  if (!grepl("^[[:alpha:]]+://", url)) {
    scheme <- if (download_method_secure()) "https://" else "http://"
    return(paste0(scheme, url))
  }
  url
}

# TRUE for a length-one NA value.
is_na <- function(x) {
  length(x) == 1 && is.na(x)
}

# Fallback implementation of base::dir.exists() for R < 3.2.
dir.exists <- function(paths) {
  if (getRversion() < "3.2") {
    x <- base::file.info(paths)$isdir
    !is.na(x) & x
  } else {
    ("base" %::% "dir.exists")(paths)
  }
}

# Binary package files end in .tgz (macOS) or .zip (Windows).
is_binary_pkg <- function(x) {
  file_ext(x) %in% c("tgz", "zip")
}

# format() wrapper that truncates entries wider than `width` with "...".
format_str <- function(x, width = Inf, trim = TRUE, justify = "none", ...) {
  x <- format(x, trim = trim, justify = justify, ...)

  if (width < Inf) {
    x_width <- nchar(x, "width")
    too_wide <- x_width > width
    if (any(too_wide)) {
      x[too_wide] <- paste0(substr(x[too_wide], 1, width - 3), "...")
    }
  }
  x
}

# Warn about the known Windows + spaces-in-R_HOME install failure on
# affected R versions (<= 3.4.2).
warn_for_potential_errors <- function() {
  if (sys_type() == "windows" && grepl(" ", R.home()) &&
      getRversion() <= "3.4.2") {
    warning(immediate. = TRUE,
      "\n!!! Installation will probably fail!\n",
      "This version of R has trouble with building and installing packages if\n",
      "the R HOME directory (currently '", R.home(), "')\n",
      "has space characters. Possible workarounds include:\n",
      "- installing R to the C: drive,\n",
      "- installing it into a path without a space, or\n",
      "- creating a drive letter for R HOME via the `subst` windows command, and\n",
      " starting R from the new drive.\n",
      "See also https://github.com/r-lib/remotes/issues/98\n")
  }
}

# Return all directories in the input paths
directories <- function(paths) {
  dirs <- unique(dirname(paths))
  out <- dirs[dirs != "."]

  # Walk up the directory tree until only "." remains.
  while (length(dirs) > 0 && any(dirs != ".")) {
    out <- unique(c(out, dirs[dirs != "."]))
    dirs <- unique(dirname(dirs))
  }

  sort(out)
}

# For each path, decide whether R CMD build would exclude it, using the
# default exclude patterns plus the package's .Rbuildignore.
in_r_build_ignore <- function(paths, ignore_file) {
  ignore <- ("tools" %:::% "get_exclude_patterns")()

  if (file.exists(ignore_file)) {
    ignore <- c(ignore, readLines(ignore_file, warn = FALSE))
  }

  matches_ignores <- function(x) {
    any(vlapply(ignore, grepl, x, perl = TRUE, ignore.case = TRUE))
  }

  # We need to search for the paths as well as directories in the path, so
  # `^foo$` matches `foo/bar`
  should_ignore <- function(path) {
    any(vlapply(c(path, directories(path)), matches_ignores))
  }

  vlapply(paths, should_ignore)
}

# Split "pkg@ref" / "pkg#pull" into its package and ref parts.
# FIX(review): the `(?<pkg>...)` and `(?<ref>...)` named capture groups
# had been stripped (invalid PCRE); restored so re_match() can name the
# result columns.
dev_split_ref <- function(x) {
  re_match(x, "^(?<pkg>[^@#]+)(?<ref>[@#].*)?$")
}

# Extract the "sha" field from a JSON response; falls back to the full
# JSON parser when the fast regex does not match.
# (Review: removed a pointless single-argument paste0() around the
# pattern; the string is identical.)
get_json_sha <- function(text) {
  m <- regexpr('"sha"\\s*:\\s*"(\\w+)"', text, perl = TRUE)
  if (all(m == -1)) {
    return(json$parse(text)$sha %||% NA_character_)
  }

  start <- attr(m, "capture.start")
  end <- start + attr(m, "capture.length") - 1L
  substring(text, start, end)
}

# from tools:::config_val_to_logical
config_val_to_logical <- function(val) {
  v <- tolower(val)
  if (v %in% c("1", "yes", "true")) {
    TRUE
  } else if (v %in% c("0", "no", "false")) {
    FALSE
  } else {
    NA
  }
}

  ## Standalone mode, make sure that we restore the env var on exit
  old <- Sys.getenv("R_REMOTES_STANDALONE", NA_character_)
  Sys.setenv("R_REMOTES_STANDALONE" = "true")
  if (is.na(old)) {
    on.exit(Sys.unsetenv("R_REMOTES_STANDALONE"), add = TRUE)
  } else {
    on.exit(Sys.setenv("R_REMOTES_STANDALONE" = old), add = TRUE)
  }

  install_github(...)
}