DBItest/0000755000176200001440000000000014157055565011565 5ustar liggesusersDBItest/NAMESPACE0000644000176200001440000000443514157035476013011 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("$",DBItest_tweaks) S3method(format,DBItest_tweaks) S3method(print,DBItest_tweaks) export(get_default_context) export(make_context) export(set_default_context) export(test_all) export(test_compliance) export(test_connection) export(test_driver) export(test_getting_started) export(test_meta) export(test_result) export(test_some) export(test_sql) export(test_stress) export(test_transaction) export(tweaks) import(testthat) importFrom(DBI,Id) importFrom(DBI,SQL) importFrom(DBI,dbAppendTable) importFrom(DBI,dbBegin) importFrom(DBI,dbBind) importFrom(DBI,dbBreak) importFrom(DBI,dbCallProc) importFrom(DBI,dbClearResult) importFrom(DBI,dbColumnInfo) importFrom(DBI,dbCommit) importFrom(DBI,dbConnect) importFrom(DBI,dbCreateTable) importFrom(DBI,dbDataType) importFrom(DBI,dbDisconnect) importFrom(DBI,dbExecute) importFrom(DBI,dbExistsTable) importFrom(DBI,dbFetch) importFrom(DBI,dbGetDBIVersion) importFrom(DBI,dbGetInfo) importFrom(DBI,dbGetQuery) importFrom(DBI,dbGetRowCount) importFrom(DBI,dbGetRowsAffected) importFrom(DBI,dbGetStatement) importFrom(DBI,dbHasCompleted) importFrom(DBI,dbIsValid) importFrom(DBI,dbListConnections) importFrom(DBI,dbListFields) importFrom(DBI,dbListObjects) importFrom(DBI,dbListTables) importFrom(DBI,dbQuoteIdentifier) importFrom(DBI,dbQuoteLiteral) importFrom(DBI,dbQuoteString) importFrom(DBI,dbReadTable) importFrom(DBI,dbRemoveTable) importFrom(DBI,dbRollback) importFrom(DBI,dbSendQuery) importFrom(DBI,dbSendStatement) importFrom(DBI,dbSetDataMappings) importFrom(DBI,dbUnquoteIdentifier) importFrom(DBI,dbWithTransaction) importFrom(DBI,dbWriteTable) importFrom(callr,r) importFrom(desc,desc_get_deps) importFrom(lubridate,with_tz) importFrom(methods,extends) importFrom(methods,findMethod) importFrom(methods,getClass) 
importFrom(methods,getClasses) importFrom(methods,hasMethod) importFrom(methods,is) importFrom(methods,new) importFrom(rlang,":=") importFrom(rlang,abort) importFrom(rlang,enexpr) importFrom(rlang,enquo) importFrom(rlang,enquos) importFrom(rlang,eval_tidy) importFrom(rlang,expr) importFrom(rlang,has_length) importFrom(rlang,is_interactive) importFrom(rlang,list2) importFrom(rlang,quo) importFrom(stats,setNames) importFrom(utils,head) importFrom(withr,with_output_sink) importFrom(withr,with_temp_libpaths) DBItest/.aspell/0000755000176200001440000000000014157034343013112 5ustar liggesusersDBItest/.aspell/defaults.R0000644000176200001440000000023014157034343015037 0ustar liggesusersRd_files <- vignettes <- R_files <- description <- list(encoding = "UTF-8", language = "en", dictionaries = c("en_stats", "DBI")) DBItest/.aspell/DBI.rds0000644000176200001440000000014714157034343014224 0ustar liggesusers‹‹ąb```b`a’Ģ@&ƒs0°0p‚h§ÄäģŌ¼”b(ŸŁÅÉŹd÷M,ŹLtq‚rY}+ƒ} ®€üā’ō¢T„GŖr– ä‚(› Ø,³$ [![rcc](https://github.com/r-dbi/DBItest/workflows/rcc/badge.svg)](https://github.com/r-dbi/DBItest/actions) [![Codecov test coverage](https://codecov.io/gh/r-dbi/DBItest/branch/main/graph/badge.svg)](https://app.codecov.io/gh/r-dbi/DBItest?branch=main) [![CRAN_Status_Badge](https://www.r-pkg.org/badges/version/DBItest)](https://cran.r-project.org/package=DBItest) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3503/badge)](https://bestpractices.coreinfrastructure.org/projects/3503) This package is primarily useful for developers of [DBI](https://dbi.r-dbi.org) backends. It provides a considerable set of test cases for DBI backends. These test cases correspond to the [DBI specification](https://dbi.r-dbi.org/articles/spec). Please follow the steps below to add these test cases to your DBI backend. 
## Installation Install from CRAN via ```r install.packages("DBItest") ``` or the development version using ```r devtools::install_github("r-dbi/DBItest") ``` ## Usage In your driver package, add `DBItest` to the `Suggests:` and enable the tests. Run the following code in you package's directory: ```r # install.packages("usethis") usethis::use_package("DBItest", "suggests") usethis::use_test("DBItest") ``` This enables testing using `testthat` (if necessary) and creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_all() ``` This assumes that `Kazam()` returns an instance of your `DBIDriver` class. Additional arguments to `dbConnect()` are specified as named list instead of the `NULL` argument to `make_context()`. The `default_skip` argument to `make_context()` allows skipping tests that are not (yet) satisfied by your backend. Further reading: - Detailed instructions in `vignette("DBItest")` - The feature list in the [original proposal](https://github.com/r-dbi/DBItest/wiki/Proposal). --- Please note that the 'DBItest' project is released with a [Contributor Code of Conduct](https://dbitest.r-dbi.org/code_of_conduct). By contributing to this project, you agree to abide by its terms. DBItest/man/0000755000176200001440000000000014157037563012336 5ustar liggesusersDBItest/man/test_driver.Rd0000644000176200001440000000207514157034343015154 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-driver.R \name{test_driver} \alias{test_driver} \title{Test the "Driver" class} \usage{ test_driver(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. 
The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test the "Driver" class } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_meta_is_valid.Rd0000644000176200001440000000322114157037563016435 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-is-valid.R \docType{data} \name{spec_meta_is_valid} \alias{spec_meta_is_valid} \title{spec_meta_is_valid} \value{ \code{dbIsValid()} returns a logical scalar, \code{TRUE} if the object specified by \code{dbObj} is valid, \code{FALSE} otherwise. A \linkS4class{DBIConnection} object is initially valid, and becomes invalid after disconnecting with \code{\link[=dbDisconnect]{dbDisconnect()}}. For an invalid connection object (e.g., for some drivers if the object is saved to a file and then restored), the method also returns \code{FALSE}. A \linkS4class{DBIResult} object is valid after a call to \code{\link[=dbSendQuery]{dbSendQuery()}}, and stays valid even after all rows have been fetched; only clearing it with \code{\link[=dbClearResult]{dbClearResult()}} invalidates it. A \linkS4class{DBIResult} object is also valid after a call to \code{\link[=dbSendStatement]{dbSendStatement()}}, and stays valid after querying the number of rows affected; only clearing it with \code{\link[=dbClearResult]{dbClearResult()}} invalidates it. 
If the connection to the database system is dropped (e.g., due to connectivity problems, server failure, etc.), \code{dbIsValid()} should return \code{FALSE}. This is not tested automatically. } \description{ spec_meta_is_valid } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}} } \concept{meta specifications} DBItest/man/spec_driver_connect.Rd0000644000176200001440000000371514157037563016651 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-connect.R \docType{data} \name{spec_driver_connect} \alias{spec_driver_connect} \title{spec_driver_connect} \value{ \code{dbConnect()} returns an S4 object that inherits from \linkS4class{DBIConnection}. This object is used to communicate with the database engine. A \code{\link[=format]{format()}} method is defined for the connection object. It returns a string that consists of a single line of text. } \description{ spec_driver_connect } \section{Specification}{ DBI recommends using the following argument names for authentication parameters, with \code{NULL} default: \itemize{ \item \code{user} for the user name (default: current user) \item \code{password} for the password \item \code{host} for the host name (default: local connection) \item \code{port} for the port number (default: local connection) \item \code{dbname} for the name of the database on the host, or the database file name } The defaults should provide reasonable behavior, in particular a local connection for \code{host = NULL}. For some DBMS (e.g., PostgreSQL), this is different to a TCP/IP connection to \code{localhost}. In addition, DBI supports the \code{bigint} argument that governs how 64-bit integer data is returned. 
The following values are supported: \itemize{ \item \code{"integer"}: always return as \code{integer}, silently overflow \item \code{"numeric"}: always return as \code{numeric}, silently round \item \code{"character"}: always return the decimal representation as \code{character} \item \code{"integer64"}: return as a data type that can be coerced using \code{\link[=as.integer]{as.integer()}} (with warning on overflow), \code{\link[=as.numeric]{as.numeric()}} and \code{\link[=as.character]{as.character()}} } } \seealso{ Other driver specifications: \code{\link{spec_driver_constructor}}, \code{\link{spec_driver_data_type}}, \code{\link{spec_get_info}} } \concept{driver specifications} DBItest/man/make_placeholder_fun.Rd0000644000176200001440000000151014157035476016752 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-bind-.R \name{make_placeholder_fun} \alias{make_placeholder_fun} \title{Create a function that creates n placeholders} \usage{ make_placeholder_fun(pattern) } \arguments{ \item{pattern}{\verb{[character(1)]}\cr Any character, optionally followed by \code{1} or \code{name}. Examples: \code{"?"}, \code{"$1"}, \code{":name"}} } \value{ \verb{[function(n)]}\cr A function with one argument \code{n} that returns a vector of length \code{n} with placeholders of the specified format. } \description{ For internal use by the \code{placeholder_format} tweak. 
} \examples{ body(DBItest:::make_placeholder_fun("?")) DBItest:::make_placeholder_fun("?")(2) DBItest:::make_placeholder_fun("$1")(3) DBItest:::make_placeholder_fun(":name")(5) } \keyword{internal} DBItest/man/spec_driver_constructor.Rd0000644000176200001440000000170114157037563017576 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-constructor.R \docType{data} \name{spec_driver_constructor} \alias{spec_driver_constructor} \title{spec_driver_constructor} \description{ spec_driver_constructor } \section{Construction of the DBIDriver object}{ The backend must support creation of an instance of its \linkS4class{DBIDriver} subclass with a \dfn{constructor function}. By default, its name is the package name without the leading \sQuote{R} (if it exists), e.g., \code{SQLite} for the \pkg{RSQLite} package. However, backend authors may choose a different name. The constructor must be exported, and it must be a function that is callable without arguments. DBI recommends to define a constructor with an empty argument list. } \seealso{ Other driver specifications: \code{\link{spec_driver_connect}}, \code{\link{spec_driver_data_type}}, \code{\link{spec_get_info}} } \concept{driver specifications} DBItest/man/spec_sql_list_fields.Rd0000644000176200001440000000324314157037563017021 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-list-fields.R \docType{data} \name{spec_sql_list_fields} \alias{spec_sql_list_fields} \title{spec_sql_list_fields} \value{ \code{dbListFields()} returns a character vector that enumerates all fields in the table in the correct order. This also works for temporary tables if supported by the database. The returned names are suitable for quoting with \code{dbQuoteIdentifier()}. } \description{ spec_sql_list_fields } \section{Failure modes}{ If the table does not exist, an error is raised. 
Invalid types for the \code{name} argument (e.g., \code{character} of length not equal to one, or numeric) lead to an error. An error is also raised when calling this method for a closed or invalid connection. } \section{Specification}{ The \code{name} argument can be \itemize{ \item a string \item the return value of \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} \item a value from the \code{table} column from the return value of \code{\link[=dbListObjects]{dbListObjects()}} where \code{is_prefix} is \code{FALSE} } A column named \code{row_names} is treated like any other column. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_quote_literal.Rd0000644000176200001440000000366014157037563017374 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-quote-literal.R \docType{data} \name{spec_sql_quote_literal} \alias{spec_sql_quote_literal} \title{spec_sql_quote_literal} \value{ \code{dbQuoteLiteral()} returns an object that can be coerced to \link{character}, of the same length as the input. For an empty integer, numeric, character, logical, date, time, or blob vector, this function returns a length-0 object. When passing the returned object again to \code{dbQuoteLiteral()} as \code{x} argument, it is returned unchanged. Passing objects of class \link{SQL} should also return them unchanged. (For backends it may be most convenient to return \link{SQL} objects to achieve this behavior, but this is not required.) 
} \description{ spec_sql_quote_literal } \section{Specification}{ The returned expression can be used in a \verb{SELECT ...} query, and the value of \code{dbGetQuery(paste0("SELECT ", dbQuoteLiteral(x)))[[1]]} must be equal to \code{x} for any scalar integer, numeric, string, and logical. If \code{x} is \code{NA}, the result must merely satisfy \code{\link[=is.na]{is.na()}}. The literals \code{"NA"} or \code{"NULL"} are not treated specially. \code{NA} should be translated to an unquoted SQL \code{NULL}, so that the query \verb{SELECT * FROM (SELECT 1) a WHERE ... IS NULL} returns one row. } \section{Failure modes}{ Passing a list for the \code{x} argument raises an error. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_result_send_statement.Rd0000644000176200001440000000755314157037563020264 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-send-statement.R \docType{data} \name{spec_result_send_statement} \alias{spec_result_send_statement} \title{spec_result_send_statement} \value{ \code{dbSendStatement()} returns an S4 object that inherits from \linkS4class{DBIResult}. The result set can be used with \code{\link[=dbGetRowsAffected]{dbGetRowsAffected()}} to determine the number of rows affected by the query. Once you have finished using a result, make sure to clear it with \code{\link[=dbClearResult]{dbClearResult()}}. 
} \description{ spec_result_send_statement } \section{Failure modes}{ An error is raised when issuing a statement over a closed or invalid connection, or if the statement is not a non-\code{NA} string. An error is also raised if the syntax of the query is invalid and all query parameters are given (by passing the \code{params} argument) or the \code{immediate} argument is set to \code{TRUE}. } \section{Specification}{ No warnings occur under normal conditions. When done, the DBIResult object must be cleared with a call to \code{\link[=dbClearResult]{dbClearResult()}}. Failure to clear the result set leads to a warning when the connection is closed. If the backend supports only one open result set per connection, issuing a second query invalidates an already open result set and raises a warning. The newly opened result set is valid and must be cleared with \code{dbClearResult()}. The \code{param} argument allows passing query parameters, see \code{\link[=dbBind]{dbBind()}} for details. } \section{Additional arguments}{ The following arguments are not part of the \code{dbSendStatement()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. 
A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[=dbBind]{dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}} } \concept{result specifications} DBItest/man/test_data_type.Rd0000644000176200001440000000313414157037563015637 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-data-type.R \name{test_data_type} \alias{test_data_type} \title{test_data_type} \usage{ test_data_type(ctx, dbObj) } \arguments{ \item{ctx, dbObj}{Arguments to internal test function} } \value{ \code{dbDataType()} returns the SQL type that corresponds to the \code{obj} argument as a non-empty character string. 
For data frames, a character vector with one element per column is returned. } \description{ test_data_type } \section{Failure modes}{ An error is raised for invalid values for the \code{obj} argument such as a \code{NULL} value. } \section{Specification}{ The backend can override the \code{\link[=dbDataType]{dbDataType()}} generic for its driver class. This generic expects an arbitrary object as second argument. To query the values returned by the default implementation, run \code{example(dbDataType, package = "DBI")}. If the backend needs to override this generic, it must accept all basic R data types as its second argument, namely \link{logical}, \link{integer}, \link{numeric}, \link{character}, dates (see \link{Dates}), date-time (see \link{DateTimeClasses}), and \link{difftime}. If the database supports blobs, this method also must accept lists of \link{raw} vectors, and \link[blob:blob]{blob::blob} objects. As-is objects (i.e., wrapped by \code{\link[=I]{I()}}) must be supported and return the same results as their unwrapped counterparts. The SQL data type for \link{factor} and \link{ordered} is the same as for character. The behavior for other object types is not specified. } \keyword{internal} DBItest/man/test_transaction.Rd0000644000176200001440000000212214157034343016177 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-transaction.R \name{test_transaction} \alias{test_transaction} \title{Test transaction functions} \usage{ test_transaction(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test transaction functions } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()} } \concept{tests} DBItest/man/spec_result_roundtrip.Rd0000644000176200001440000000516114157037563017266 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-roundtrip.R \docType{data} \name{spec_result_roundtrip} \alias{spec_result_roundtrip} \title{spec_result_roundtrip} \description{ spec_result_roundtrip } \section{Specification}{ The column types of the returned data frame depend on the data returned: \itemize{ \item \link{integer} (or coercible to an integer) for integer values between -2^31 and 2^31 - 1, with \link{NA} for SQL \code{NULL} values \item \link{numeric} for numbers with a fractional component, with NA for SQL \code{NULL} values \item \link{logical} for Boolean values (some backends may return an integer); with NA for SQL \code{NULL} values \item \link{character} for text, with NA for SQL \code{NULL} values \item lists of \link{raw} for blobs with \link{NULL} entries for SQL NULL values \item coercible using \code{\link[=as.Date]{as.Date()}} for dates, with NA for SQL \code{NULL} values (also applies to the return value of the SQL function \code{current_date}) \item coercible using \code{\link[hms:hms]{hms::as_hms()}} for times, with NA for SQL \code{NULL} values (also applies to the return value of the SQL function \code{current_time}) \item coercible using \code{\link[=as.POSIXct]{as.POSIXct()}} for timestamps, with NA for SQL \code{NULL} values (also applies to the return value of the SQL function 
\code{current_timestamp}) } If dates and timestamps are supported by the backend, the following R types are used: \itemize{ \item \link{Date} for dates (also applies to the return value of the SQL function \code{current_date}) \item \link{POSIXct} for timestamps (also applies to the return value of the SQL function \code{current_timestamp}) } R has no built-in type with lossless support for the full range of 64-bit or larger integers. If 64-bit integers are returned from a query, the following rules apply: \itemize{ \item Values are returned in a container with support for the full range of valid 64-bit values (such as the \code{integer64} class of the \pkg{bit64} package) \item Coercion to numeric always returns a number that is as close as possible to the true value \item Loss of precision when converting to numeric gives a warning \item Conversion to character always returns a lossless decimal representation of the data } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_sql_write_table.Rd0000644000176200001440000001375414157037563017031 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-write-table.R \docType{data} \name{spec_sql_write_table} \alias{spec_sql_write_table} \title{spec_sql_write_table} \value{ \code{dbWriteTable()} returns \code{TRUE}, invisibly. } \description{ spec_sql_write_table } \section{Failure modes}{ If the table exists, and both \code{append} and \code{overwrite} arguments are unset, or \code{append = TRUE} and the data frame with the new data has different column names, an error is raised; the remote table remains unchanged. 
An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the additional arguments \code{row.names}, \code{overwrite}, \code{append}, \code{field.types}, and \code{temporary} (non-scalars, unsupported data types, \code{NA}, incompatible values, duplicate or missing names, incompatible columns) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbWriteTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{row.names} (default: \code{FALSE}) \item \code{overwrite} (default: \code{FALSE}) \item \code{append} (default: \code{FALSE}) \item \code{field.types} (default: \code{NULL}) \item \code{temporary} (default: \code{FALSE}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbWriteTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}: no more quoting is done } The \code{value} argument must be a data frame with a subset of the columns of the existing table if \code{append = TRUE}. The order of the columns does not matter with \code{append = TRUE}. If the \code{overwrite} argument is \code{TRUE}, an existing table of the same name will be overwritten. This argument doesn't change behavior if the table does not exist yet. If the \code{append} argument is \code{TRUE}, the rows in an existing table are preserved, and the new data are appended. 
If the table doesn't exist yet, it is created. If the \code{temporary} argument is \code{TRUE}, the table is not available in a second connection and is gone after reconnecting. Not all backends support this argument. A regular, non-temporary table is visible in a second connection, in a pre-existing connection, and after reconnecting to the database. SQL keywords can be used freely in table names, column names, and data. Quotes, commas, spaces, and other special characters such as newlines and tabs, can also be used in the data, and, if the database supports non-syntactic identifiers, also for table names and column names. The following data types must be supported at least, and be read identically with \code{\link[=dbReadTable]{dbReadTable()}}: \itemize{ \item integer \item numeric (the behavior for \code{Inf} and \code{NaN} is not specified) \item logical \item \code{NA} as NULL \item 64-bit values (using \code{"bigint"} as field type); the result can be \itemize{ \item converted to a numeric, which may lose precision, \item converted a character vector, which gives the full decimal representation \item written to another table and read again unchanged } \item character (in both UTF-8 and native encodings), supporting empty strings before and after a non-empty string \item factor (returned as character) \item list of raw (if supported by the database) \item objects of type \link[blob:blob]{blob::blob} (if supported by the database) \item date (if supported by the database; returned as \code{Date}), also for dates prior to 1970 or 1900 or after 2038 \item time (if supported by the database; returned as objects that inherit from \code{difftime}) \item timestamp (if supported by the database; returned as \code{POSIXct} respecting the time zone but not necessarily preserving the input time zone), also for timestamps prior to 1970 or 1900 or after 2038 respecting the time zone but not necessarily preserving the input time zone) } Mixing column types in the same table 
is supported. The \code{field.types} argument must be a named character vector with at most one entry for each column. It indicates the SQL data type to be used for a new column. If a column is missed from \code{field.types}, the type is inferred from the input data with \code{\link[=dbDataType]{dbDataType()}}. The interpretation of \link{rownames} depends on the \code{row.names} argument, see \code{\link[=sqlRownamesToColumn]{sqlRownamesToColumn()}} for details: \itemize{ \item If \code{FALSE} or \code{NULL}, row names are ignored. \item If \code{TRUE}, row names are converted to a column named "row_names", even if the input data frame only has natural row names from 1 to \code{nrow(...)}. \item If \code{NA}, a column named "row_names" is created if the data has custom row names, no extra column is created in the case of natural row names. \item If a string, this specifies the name of the column in the remote table that contains the row names, even if the input data frame only has natural row names. } The default is \code{row.names = FALSE}. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}} } \concept{sql specifications} DBItest/man/spec_getting_started.Rd0000644000176200001440000000110514157037563017023 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-getting-started.R \docType{data} \name{spec_getting_started} \alias{spec_getting_started} \title{spec_getting_started} \description{ spec_getting_started } \section{Definition}{ A DBI backend is an R package which imports the \pkg{DBI} and \pkg{methods} packages. For better or worse, the names of many existing backends start with \sQuote{R}, e.g., \pkg{RSQLite}, \pkg{RMySQL}, \pkg{RSQLServer}; it is up to the backend author to adopt this convention or not. } \concept{getting specifications} DBItest/man/test_result.Rd0000644000176200001440000000207514157034343015177 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-result.R \name{test_result} \alias{test_result} \title{Test the "Result" class} \usage{ test_result(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test the "Result" class } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_compliance_methods.Rd0000644000176200001440000000140614157037563017475 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-compliance-methods.R \docType{data} \name{spec_compliance_methods} \alias{spec_compliance_methods} \title{spec_compliance_methods} \description{ spec_compliance_methods } \section{DBI classes and methods}{ A backend defines three classes, which are subclasses of \linkS4class{DBIDriver}, \linkS4class{DBIConnection}, and \linkS4class{DBIResult}. The backend provides implementation for all methods of these base classes that are defined but not implemented by DBI. All methods defined in \pkg{DBI} are reexported (so that the package can be used without having to attach \pkg{DBI}), and have an ellipsis \code{...} in their formals for extensibility. } \concept{compliance specifications} DBItest/man/spec_sql_create_table.Rd0000644000176200001440000000544114157037563017134 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-create-table.R \docType{data} \name{spec_sql_create_table} \alias{spec_sql_create_table} \title{spec_sql_create_table} \value{ \code{dbCreateTable()} returns \code{TRUE}, invisibly. } \description{ spec_sql_create_table } \section{Failure modes}{ If the table exists, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. 
An error is also raised if \code{name} cannot be processed with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the \code{row.names} and \code{temporary} arguments (non-scalars, unsupported data types, \code{NA}, incompatible values, duplicate names) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbCreateTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{temporary} (default: \code{FALSE}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbCreateTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}: no more quoting is done } If the \code{temporary} argument is \code{TRUE}, the table is not available in a second connection and is gone after reconnecting. Not all backends support this argument. A regular, non-temporary table is visible in a second connection, in a pre-existing connection, and after reconnecting to the database. SQL keywords can be used freely in table names, column names, and data. Quotes, commas, and spaces can also be used for table names and column names, if the database supports non-syntactic identifiers. The \code{row.names} argument must be missing or \code{NULL}, the default value. All other values for the \code{row.names} argument (in particular \code{TRUE}, \code{NA}, and a string) raise an error. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_result_execute.Rd0000644000176200001440000000576114157037563016710 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-execute.R \docType{data} \name{spec_result_execute} \alias{spec_result_execute} \title{spec_result_execute} \value{ \code{dbExecute()} always returns a scalar numeric that specifies the number of rows affected by the statement. } \description{ spec_result_execute } \section{Failure modes}{ An error is raised when issuing a statement over a closed or invalid connection, if the syntax of the statement is invalid, or if the statement is not a non-\code{NA} string. } \section{Additional arguments}{ The following arguments are not part of the \code{dbExecute()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification}{ The \code{param} argument allows passing query parameters, see \code{\link[=dbBind]{dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. 
Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[=dbBind]{dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_driver_data_type.Rd0000644000176200001440000000327214157037563017170 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-data-type.R \docType{data} 
\name{spec_driver_data_type} \alias{spec_driver_data_type} \title{spec_driver_data_type} \value{ \code{dbDataType()} returns the SQL type that corresponds to the \code{obj} argument as a non-empty character string. For data frames, a character vector with one element per column is returned. } \description{ spec_driver_data_type } \section{Failure modes}{ An error is raised for invalid values for the \code{obj} argument such as a \code{NULL} value. } \section{Specification}{ The backend can override the \code{\link[=dbDataType]{dbDataType()}} generic for its driver class. This generic expects an arbitrary object as second argument. To query the values returned by the default implementation, run \code{example(dbDataType, package = "DBI")}. If the backend needs to override this generic, it must accept all basic R data types as its second argument, namely \link{logical}, \link{integer}, \link{numeric}, \link{character}, dates (see \link{Dates}), date-time (see \link{DateTimeClasses}), and \link{difftime}. If the database supports blobs, this method also must accept lists of \link{raw} vectors, and \link[blob:blob]{blob::blob} objects. As-is objects (i.e., wrapped by \code{\link[=I]{I()}}) must be supported and return the same results as their unwrapped counterparts. The SQL data type for \link{factor} and \link{ordered} is the same as for character. The behavior for other object types is not specified. 
} \seealso{ Other driver specifications: \code{\link{spec_driver_connect}}, \code{\link{spec_driver_constructor}}, \code{\link{spec_get_info}} } \concept{driver specifications} DBItest/man/spec_transaction_begin_commit_rollback.Rd0000644000176200001440000000422714157037563022556 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-transaction-begin-commit-rollback.R \docType{data} \name{spec_transaction_begin_commit_rollback} \alias{spec_transaction_begin_commit_rollback} \title{spec_transaction_begin_commit_rollback} \value{ \code{dbBegin()}, \code{dbCommit()} and \code{dbRollback()} return \code{TRUE}, invisibly. } \description{ spec_transaction_begin_commit_rollback } \section{Failure modes}{ The implementations are expected to raise an error in case of failure, but this is not tested. In any way, all generics throw an error with a closed or invalid connection. In addition, a call to \code{dbCommit()} or \code{dbRollback()} without a prior call to \code{dbBegin()} raises an error. Nested transactions are not supported by DBI, an attempt to call \code{dbBegin()} twice yields an error. } \section{Specification}{ Actual support for transactions may vary between backends. A transaction is initiated by a call to \code{dbBegin()} and committed by a call to \code{dbCommit()}. Data written in a transaction must persist after the transaction is committed. For example, a record that is missing when the transaction is started but is created during the transaction must exist both during and after the transaction, and also in a new connection. A transaction can also be aborted with \code{dbRollback()}. All data written in such a transaction must be removed after the transaction is rolled back. For example, a record that is missing when the transaction is started but is created during the transaction must not exist anymore after the rollback. 
Disconnection from a connection with an open transaction effectively rolls back the transaction. All data written in such a transaction must be removed after the transaction is rolled back. The behavior is not specified if other arguments are passed to these functions. In particular, \pkg{RSQLite} issues named transactions with support for nesting if the \code{name} argument is set. The transaction isolation level is not specified by DBI. } \seealso{ Other transaction specifications: \code{\link{spec_transaction_with_transaction}} } \concept{transaction specifications} DBItest/man/spec_meta_has_completed.Rd0000644000176200001440000000320414157037563017453 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-has-completed.R \docType{data} \name{spec_meta_has_completed} \alias{spec_meta_has_completed} \title{spec_meta_has_completed} \value{ \code{dbHasCompleted()} returns a logical scalar. For a query initiated by \code{\link[=dbSendQuery]{dbSendQuery()}} with non-empty result set, \code{dbHasCompleted()} returns \code{FALSE} initially and \code{TRUE} after calling \code{\link[=dbFetch]{dbFetch()}} without limit. For a query initiated by \code{\link[=dbSendStatement]{dbSendStatement()}}, \code{dbHasCompleted()} always returns \code{TRUE}. } \description{ spec_meta_has_completed } \section{Failure modes}{ Attempting to query completion status for a result set cleared with \code{\link[=dbClearResult]{dbClearResult()}} gives an error. } \section{Specification}{ The completion status for a query is only guaranteed to be set to \code{TRUE} after attempting to fetch past the end of the entire result. Therefore, for a query with an empty result set, the initial return value is unspecified, but the result value is \code{TRUE} after trying to fetch only one row. 
Similarly, for a query with a result set of length n, the return value is unspecified after fetching n rows, but the result value is \code{TRUE} after trying to fetch only one more row. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_sql_remove_table.Rd0000644000176200001440000000515714157037563017172 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-remove-table.R \docType{data} \name{spec_sql_remove_table} \alias{spec_sql_remove_table} \title{spec_sql_remove_table} \value{ \code{dbRemoveTable()} returns \code{TRUE}, invisibly. } \description{ spec_sql_remove_table } \section{Failure modes}{ If the table does not exist, an error is raised. An attempt to remove a view with this function may result in an error. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} or if this results in a non-scalar. } \section{Additional arguments}{ The following arguments are not part of the \code{dbRemoveTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{temporary} (default: \code{FALSE}) \item \code{fail_if_missing} (default: \code{TRUE}) } These arguments must be provided as named arguments. If \code{temporary} is \code{TRUE}, the call to \code{dbRemoveTable()} will consider only temporary tables. Not all backends support this argument. In particular, permanent tables of the same name are left untouched. If \code{fail_if_missing} is \code{FALSE}, the call to \code{dbRemoveTable()} succeeds if the table does not exist. 
} \section{Specification}{ A table removed by \code{dbRemoveTable()} doesn't appear in the list of tables returned by \code{\link[=dbListTables]{dbListTables()}}, and \code{\link[=dbExistsTable]{dbExistsTable()}} returns \code{FALSE}. The removal propagates immediately to other connections to the same database. This function can also be used to remove a temporary table. The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbRemoveTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}: no more quoting is done } } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_meta.Rd0000644000176200001440000000206714157034343014610 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-meta.R \name{test_meta} \alias{test_meta} \title{Test metadata functions} \usage{ test_meta(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test metadata functions } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_meta_bind.Rd0000644000176200001440000001257214157037563015570 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-bind-runner.R, R/spec-meta-bind.R \docType{data} \name{spec_meta_bind} \alias{spec_meta_bind} \title{spec_meta_bind} \value{ \code{dbBind()} returns the result set, invisibly, for queries issued by \code{\link[=dbSendQuery]{dbSendQuery()}} and also for data manipulation statements issued by \code{\link[=dbSendStatement]{dbSendStatement()}}. } \description{ spec_meta_bind spec_meta_bind } \section{Specification}{ \pkg{DBI} clients execute parametrized statements as follows: \enumerate{ \item Call \code{\link[=dbSendQuery]{dbSendQuery()}} or \code{\link[=dbSendStatement]{dbSendStatement()}} with a query or statement that contains placeholders, store the returned \linkS4class{DBIResult} object in a variable. Mixing placeholders (in particular, named and unnamed ones) is not recommended. It is good practice to register a call to \code{\link[=dbClearResult]{dbClearResult()}} via \code{\link[=on.exit]{on.exit()}} right after calling \code{dbSendQuery()} or \code{dbSendStatement()} (see the last enumeration item). 
Until \code{dbBind()} has been called, the returned result set object has the following behavior: \itemize{ \item \code{\link[=dbFetch]{dbFetch()}} raises an error (for \code{dbSendQuery()}) \item \code{\link[=dbGetRowCount]{dbGetRowCount()}} returns zero (for \code{dbSendQuery()}) \item \code{\link[=dbGetRowsAffected]{dbGetRowsAffected()}} returns an integer \code{NA} (for \code{dbSendStatement()}) \item \code{\link[=dbIsValid]{dbIsValid()}} returns \code{TRUE} \item \code{\link[=dbHasCompleted]{dbHasCompleted()}} returns \code{FALSE} } \item Construct a list with parameters that specify actual values for the placeholders. The list must be named or unnamed, depending on the kind of placeholders used. Named values are matched to named parameters, unnamed values are matched by position in the list of parameters. All elements in this list must have the same lengths and contain values supported by the backend; a \link{data.frame} is internally stored as such a list. The parameter list is passed to a call to \code{dbBind()} on the \code{DBIResult} object. \item Retrieve the data or the number of affected rows from the \code{DBIResult} object. \itemize{ \item For queries issued by \code{dbSendQuery()}, call \code{\link[=dbFetch]{dbFetch()}}. \item For statements issued by \code{dbSendStatements()}, call \code{\link[=dbGetRowsAffected]{dbGetRowsAffected()}}. (Execution begins immediately after the \code{dbBind()} call, the statement is processed entirely before the function returns.) } \item Repeat 2. and 3. as necessary. \item Close the result set via \code{\link[=dbClearResult]{dbClearResult()}}. } The elements of the \code{params} argument do not need to be scalars, vectors of arbitrary length (including length 0) are supported. For queries, calling \code{dbFetch()} binding such parameters returns concatenated results, equivalent to binding and fetching for each set of values and connecting via \code{\link[=rbind]{rbind()}}. 
For data manipulation statements, \code{dbGetRowsAffected()} returns the total number of rows affected if binding non-scalar parameters. \code{dbBind()} also accepts repeated calls on the same result set for both queries and data manipulation statements, even if no results are fetched between calls to \code{dbBind()}, for both queries and data manipulation statements. If the placeholders in the query are named, their order in the \code{params} argument is not important. At least the following data types are accepted on input (including \link{NA}): \itemize{ \item \link{integer} \item \link{numeric} \item \link{logical} for Boolean values \item \link{character} (also with special characters such as spaces, newlines, quotes, and backslashes) \item \link{factor} (bound as character, with warning) \item \link{Date} (also when stored internally as integer) \item \link{POSIXct} timestamps \item \link{POSIXlt} timestamps \item \link{difftime} values (also with units other than seconds and with the value stored as integer) \item lists of \link{raw} for blobs (with \code{NULL} entries for SQL NULL values) \item objects of type \link[blob:blob]{blob::blob} } } \section{Failure modes}{ Calling \code{dbBind()} for a query without parameters raises an error. Binding too many or not enough values, or parameters with wrong names or unequal length, also raises an error. If the placeholders in the query are named, all parameter values must have names (which must not be empty or \code{NA}), and vice versa, otherwise an error is raised. The behavior for mixing placeholders of different types (in particular mixing positional and named placeholders) is not specified. Calling \code{dbBind()} on a result set already cleared by \code{\link[=dbClearResult]{dbClearResult()}} also raises an error. 
} \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_sql_read_table.Rd0000644000176200001440000000644314157037563016607 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-read-table.R \docType{data} \name{spec_sql_read_table} \alias{spec_sql_read_table} \title{spec_sql_read_table} \value{ \code{dbReadTable()} returns a data frame that contains the complete data from the remote table, effectively the result of calling \code{\link[=dbGetQuery]{dbGetQuery()}} with \verb{SELECT * FROM }. An empty table is returned as a data frame with zero rows. The presence of \link{rownames} depends on the \code{row.names} argument, see \code{\link[=sqlColumnToRownames]{sqlColumnToRownames()}} for details: \itemize{ \item If \code{FALSE} or \code{NULL}, the returned data frame doesn't have row names. \item If \code{TRUE}, a column named "row_names" is converted to row names. } \itemize{ \item If \code{NA}, a column named "row_names" is converted to row names if it exists, otherwise no translation occurs. \item If a string, this specifies the name of the column in the remote table that contains the row names. } The default is \code{row.names = FALSE}. 
If the database supports identifiers with special characters, the columns in the returned data frame are converted to valid R identifiers if the \code{check.names} argument is \code{TRUE}. If \code{check.names = FALSE}, the returned table has non-syntactic column names without quotes. } \description{ spec_sql_read_table } \section{Failure modes}{ An error is raised if the table does not exist. An error is raised if \code{row.names} is \code{TRUE} and no "row_names" column exists. An error is raised if \code{row.names} is set to a string and no corresponding column exists. An error is raised when calling this method for a closed or invalid connection. An error is raised if \code{name} cannot be processed with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} or if this results in a non-scalar. Unsupported values for \code{row.names} and \code{check.names} (non-scalars, unsupported data types, \code{NA} for \code{check.names}) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbReadTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{row.names} (default: \code{FALSE}) \item \code{check.names} } They must be provided as named arguments. See the "Value" section for details on their usage. 
} \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbReadTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}: no more quoting is done } } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_connection.Rd0000644000176200001440000000212114157034343016010 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-connection.R \name{test_connection} \alias{test_connection} \title{Test the "Connection" class} \usage{ test_connection(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test the "Connection" class } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_meta_get_rows_affected.Rd0000644000176200001440000000226114157037563020320 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-get-rows-affected.R \docType{data} \name{spec_meta_get_rows_affected} \alias{spec_meta_get_rows_affected} \title{spec_meta_get_rows_affected} \value{ \code{dbGetRowsAffected()} returns a scalar number (integer or numeric), the number of rows affected by a data manipulation statement issued with \code{\link[=dbSendStatement]{dbSendStatement()}}. The value is available directly after the call and does not change after calling \code{\link[=dbFetch]{dbFetch()}}. For queries issued with \code{\link[=dbSendQuery]{dbSendQuery()}}, zero is returned before and after the call to \code{dbFetch()}. } \description{ spec_meta_get_rows_affected } \section{Failure modes}{ Attempting to get the rows affected for a result set cleared with \code{\link[=dbClearResult]{dbClearResult()}} gives an error. 
} \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_result_fetch.Rd0000644000176200001440000000401314157037563016324 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-fetch.R \docType{data} \name{spec_result_fetch} \alias{spec_result_fetch} \title{spec_result_fetch} \value{ \code{dbFetch()} always returns a \link{data.frame} with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. } \description{ spec_result_fetch } \section{Failure modes}{ An attempt to fetch from a closed result set raises an error. If the \code{n} argument is not an atomic whole number greater or equal to -1 or Inf, an error is raised, but a subsequent call to \code{dbFetch()} with proper \code{n} argument succeeds. Calling \code{dbFetch()} on a result set from a data manipulation query created by \code{\link[=dbSendStatement]{dbSendStatement()}} can be fetched and return an empty data frame, with a warning. } \section{Specification}{ Fetching multi-row queries with one or more columns by default returns the entire result. Multi-row queries can also be fetched progressively by passing a whole number (\link{integer} or \link{numeric}) as the \code{n} argument. A value of \link{Inf} for the \code{n} argument is supported and also returns the full result. If more rows than available are fetched, the result is returned in full without warning. If fewer rows than requested are returned, further fetches will return a data frame with zero rows. If zero rows are fetched, the columns of the data frame are still fully typed. 
Fetching fewer rows than available is permitted, no warning is issued when clearing the result set. A column named \code{row_names} is treated like any other column. } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_result_clear_result.Rd0000644000176200001440000000223614157037563017724 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-clear-result.R \docType{data} \name{spec_result_clear_result} \alias{spec_result_clear_result} \title{spec_result_clear_result} \value{ \code{dbClearResult()} returns \code{TRUE}, invisibly, for result sets obtained from both \code{dbSendQuery()} and \code{dbSendStatement()}. } \description{ spec_result_clear_result } \section{Failure modes}{ An attempt to close an already closed result set issues a warning in both cases. } \section{Specification}{ \code{dbClearResult()} frees all resources associated with retrieving the result of a query or update operation. The DBI backend can expect a call to \code{dbClearResult()} for each \code{\link[=dbSendQuery]{dbSendQuery()}} or \code{\link[=dbSendStatement]{dbSendStatement()}} call. 
} \seealso{ Other result specifications: \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_meta_get_statement.Rd0000644000176200001440000000160614157037563017513 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-get-statement.R \docType{data} \name{spec_meta_get_statement} \alias{spec_meta_get_statement} \title{spec_meta_get_statement} \value{ \code{dbGetStatement()} returns a string, the query used in either \code{\link[=dbSendQuery]{dbSendQuery()}} or \code{\link[=dbSendStatement]{dbSendStatement()}}. } \description{ spec_meta_get_statement } \section{Failure modes}{ Attempting to query the statement for a result set cleared with \code{\link[=dbClearResult]{dbClearResult()}} gives an error. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_result_create_table_with_data_type.Rd0000644000176200001440000000147014157037563022736 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-create-table-with-data-type.R \docType{data} \name{spec_result_create_table_with_data_type} \alias{spec_result_create_table_with_data_type} \title{spec_result_create_table_with_data_type} \description{ spec_result_create_table_with_data_type } \section{Specification}{ All data types returned by \code{dbDataType()} are usable in an SQL statement of the form \code{"CREATE TABLE test (a ...)"}. 
} \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_sql_list_objects.Rd0000644000176200001440000000542614157037563017211 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-list-objects.R \docType{data} \name{spec_sql_list_objects} \alias{spec_sql_list_objects} \title{spec_sql_list_objects} \value{ \code{dbListObjects()} returns a data frame with columns \code{table} and \code{is_prefix} (in that order), optionally with other columns with a dot (\code{.}) prefix. The \code{table} column is of type list. Each object in this list is suitable for use as argument in \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}. The \code{is_prefix} column is a logical. This data frame contains one row for each object (schema, table and view) accessible from the prefix (if passed) or from the global namespace (if prefix is omitted). Tables added with \code{\link[=dbWriteTable]{dbWriteTable()}} are part of the data frame. As soon a table is removed from the database, it is also removed from the data frame of database objects. The same applies to temporary objects if supported by the database. The returned names are suitable for quoting with \code{dbQuoteIdentifier()}. } \description{ spec_sql_list_objects } \section{Failure modes}{ An error is raised when calling this method for a closed or invalid connection. } \section{Specification}{ The \code{table} object can be quoted with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}. The result of quoting can be passed to \code{\link[=dbUnquoteIdentifier]{dbUnquoteIdentifier()}}. The unquoted results are equal to the original \code{table} object. 
(For backends it may be convenient to use the \link{Id} class, but this is not required.) The \code{prefix} column indicates if the \code{table} value refers to a table or a prefix. For a call with the default \code{prefix = NULL}, the \code{table} values that have \code{is_prefix == FALSE} correspond to the tables returned from \code{\link[=dbListTables]{dbListTables()}}, Values in \code{table} column that have \code{is_prefix == TRUE} can be passed as the \code{prefix} argument to another call to \code{dbListObjects()}. For the data frame returned from a \code{dbListObject()} call with the \code{prefix} argument set, all \code{table} values where \code{is_prefix} is \code{FALSE} can be used in a call to \code{\link[=dbExistsTable]{dbExistsTable()}} which returns \code{TRUE}. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_append_table.Rd0000644000176200001440000000754714157037563017151 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-append-table.R \docType{data} \name{spec_sql_append_table} \alias{spec_sql_append_table} \title{spec_sql_append_table} \value{ \code{dbAppendTable()} returns a scalar numeric. } \description{ spec_sql_append_table } \section{Failure modes}{ If the table does not exist, or the new data in \code{values} is not a data frame or has different column names, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. 
An error is also raised if \code{name} cannot be processed with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the \code{row.names} argument (non-scalars, unsupported data types, \code{NA}) also raise an error. Passing a \code{value} argument different to \code{NULL} to the \code{row.names} argument (in particular \code{TRUE}, \code{NA}, and a string) raises an error. } \section{Specification}{ SQL keywords can be used freely in table names, column names, and data. Quotes, commas, spaces, and other special characters such as newlines and tabs, can also be used in the data, and, if the database supports non-syntactic identifiers, also for table names and column names. The following data types must be supported at least, and be read identically with \code{\link[=dbReadTable]{dbReadTable()}}: \itemize{ \item integer \item numeric (the behavior for \code{Inf} and \code{NaN} is not specified) \item logical \item \code{NA} as NULL \item 64-bit values (using \code{"bigint"} as field type); the result can be \itemize{ \item converted to a numeric, which may lose precision, \item converted a character vector, which gives the full decimal representation \item written to another table and read again unchanged } \item character (in both UTF-8 and native encodings), supporting empty strings (before and after non-empty strings) \item factor (returned as character, with a warning) \item list of raw (if supported by the database) \item objects of type \link[blob:blob]{blob::blob} (if supported by the database) \item date (if supported by the database; returned as \code{Date}) also for dates prior to 1970 or 1900 or after 2038 \item time (if supported by the database; returned as objects that inherit from \code{difftime}) \item timestamp (if supported by the database; returned as \code{POSIXct} respecting the time zone but not necessarily preserving the input time zone), also for timestamps prior to 1970 or 1900 or after 
2038) } Mixing column types in the same table is supported. The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbAppendTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}: no more quoting is done }
When passing the first element of a returned object again to \code{dbUnquoteIdentifier()} as \code{x} argument, it is returned unchanged (but wrapped in a list). Passing objects of class \link{Id} should also return them unchanged (but wrapped in a list). (For backends it may be most convenient to return \link{Id} objects to achieve this behavior, but this is not required.) } \description{ spec_sql_unquote_identifier } \section{Failure modes}{ An error is raised if plain character vectors are passed as the \code{x} argument. } \section{Specification}{ For any character vector of length one, quoting (with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}) then unquoting then quoting the first element is identical to just quoting. This is also true for strings that contain special characters such as a space, a dot, a comma, or quotes used to mark strings or identifiers, if the database supports this. Unquoting simple strings (consisting of only letters) wrapped with \code{\link[=SQL]{SQL()}} and then quoting via \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} gives the same result as just quoting the string. Similarly, unquoting expressions of the form \code{SQL("schema.table")} and then quoting gives the same result as quoting the identifier constructed by \code{Id(schema = "schema", table = "table")}. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_compliance.Rd0000644000176200001440000000212114157034343015763 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-compliance.R \name{test_compliance} \alias{test_compliance} \title{Test full compliance to DBI} \usage{ test_compliance(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test full compliance to DBI } \seealso{ Other tests: \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/tweaks.Rd0000644000176200001440000001055514157034343014122 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tweaks.R \name{tweaks} \alias{tweaks} \title{Tweaks for DBI tests} \usage{ tweaks( ..., constructor_name = NULL, constructor_relax_args = FALSE, strict_identifier = FALSE, omit_blob_tests = FALSE, current_needs_parens = FALSE, union = function(x) paste(x, collapse = " UNION "), placeholder_pattern = NULL, logical_return = identity, date_cast = function(x) paste0("date('", x, "')"), time_cast = function(x) paste0("time('", x, "')"), timestamp_cast = function(x) paste0("timestamp('", x, "')"), blob_cast = identity, date_typed = TRUE, time_typed = TRUE, timestamp_typed = TRUE, temporary_tables = TRUE, list_temporary_tables = TRUE, is_null_check = function(x) paste0("(", x, " IS NULL)"), create_table_as = function(table_name, query) paste0("CREATE TABLE ", table_name, " AS ", query), dbitest_version = "1.7.1" ) } \arguments{ \item{...}{\verb{[any]}\cr Unknown tweaks are accepted, with a warning. 
The ellipsis also makes sure that you only can pass named arguments.} \item{constructor_name}{\verb{[character(1)]}\cr Name of the function that constructs the \code{Driver} object.} \item{constructor_relax_args}{\verb{[logical(1)]}\cr If \code{TRUE}, allow a driver constructor with default values for all arguments; otherwise, require a constructor with empty argument list (default).} \item{strict_identifier}{\verb{[logical(1)]}\cr Set to \code{TRUE} if the DBMS does not support arbitrarily-named identifiers even when quoting is used.} \item{omit_blob_tests}{\verb{[logical(1)]}\cr Set to \code{TRUE} if the DBMS does not support a \code{BLOB} data type.} \item{current_needs_parens}{\verb{[logical(1)]}\cr Set to \code{TRUE} if the SQL functions \code{current_date}, \code{current_time}, and \code{current_timestamp} require parentheses.} \item{union}{\verb{[function(character)]}\cr Function that combines several subqueries into one so that the resulting query returns the concatenated results of the subqueries} \item{placeholder_pattern}{\verb{[character]}\cr A pattern for placeholders used in \code{\link[=dbBind]{dbBind()}}, e.g., \code{"?"}, \code{"$1"}, or \code{":name"}. 
See \code{\link[=make_placeholder_fun]{make_placeholder_fun()}} for details.} \item{logical_return}{\verb{[function(logical)]}\cr A vectorized function that converts logical values to the data type returned by the DBI backend.} \item{date_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a date value.} \item{time_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a time value.} \item{timestamp_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a timestamp value.} \item{blob_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a blob value.} \item{date_typed}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support a dedicated type for dates.} \item{time_typed}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support a dedicated type for times.} \item{timestamp_typed}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support a dedicated type for timestamps.} \item{temporary_tables}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support temporary tables.} \item{list_temporary_tables}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support listing temporary tables.} \item{is_null_check}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for checking if a value is \code{NULL}.} \item{create_table_as}{\verb{[function(character(1), character(1))]}\cr A function that creates an SQL expression for creating a table from an SQL expression.} \item{dbitest_version}{\verb{[character(1)]}\cr Compatible DBItest version, default: "1.7.1".} } \description{ The tweaks are a way to control the behavior of certain tests. 
Currently, you need to search the \pkg{DBItest} source code to understand which tests are affected by which tweaks. This function is usually called to set the \code{tweaks} argument in a \code{\link[=make_context]{make_context()}} call. } \examples{ \dontrun{ make_context(..., tweaks = tweaks(strict_identifier = TRUE)) } } DBItest/man/test_all.Rd0000644000176200001440000000473314157034343014434 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-all.R, R/test-getting-started.R, % R/test-driver.R, R/test-connection.R, R/test-result.R, R/test-sql.R, % R/test-meta.R, R/test-transaction.R, R/test-compliance.R, R/test-stress.R \name{test_all} \alias{test_all} \alias{test_some} \title{Run all tests} \usage{ test_all(skip = NULL, run_only = NULL, ctx = get_default_context()) test_some(test, ctx = get_default_context(), dblog = TRUE) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} \item{test}{\verb{[character]}\cr A character vector of regular expressions describing the tests to run. The regular expressions are matched against the entire test name.} \item{dblog}{\verb{[logical(1)]}\cr Set to \code{FALSE} to disable dblog integration.} } \description{ \code{test_all()} calls all tests defined in this package (see the section "Tests" below). This function supports running only one test by setting an environment variable, e.g., set the \code{DBITEST_ONLY_RESULT} to a nonempty value to run only \code{test_result()}. \code{test_some()} allows testing one or more tests. 
} \details{ Internally \code{^} and \code{$} are used as prefix and suffix around the regular expressions passed in the \code{skip} and \code{run_only} arguments. } \section{Tests}{ This function runs the following tests, except the stress tests: \code{\link[=test_getting_started]{test_getting_started()}}: Getting started with testing \code{\link[=test_driver]{test_driver()}}: Test the "Driver" class \code{\link[=test_connection]{test_connection()}}: Test the "Connection" class \code{\link[=test_result]{test_result()}}: Test the "Result" class \code{\link[=test_sql]{test_sql()}}: Test SQL methods \code{\link[=test_meta]{test_meta()}}: Test metadata functions \code{\link[=test_transaction]{test_transaction()}}: Test transaction functions \code{\link[=test_compliance]{test_compliance()}}: Test full compliance to DBI \code{\link[=test_stress]{test_stress()}}: Stress tests (not tested with \code{test_all}) } DBItest/man/test_sql.Rd0000644000176200001440000000204614157034343014456 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-sql.R \name{test_sql} \alias{test_sql} \title{Test SQL methods} \usage{ test_sql(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test SQL methods } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_sql_exists_table.Rd0000644000176200001440000000337014157037563017207 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-exists-table.R \docType{data} \name{spec_sql_exists_table} \alias{spec_sql_exists_table} \title{spec_sql_exists_table} \value{ \code{dbExistsTable()} returns a logical scalar, \code{TRUE} if the table or view specified by the \code{name} argument exists, \code{FALSE} otherwise. This includes temporary tables if supported by the database. } \description{ spec_sql_exists_table } \section{Failure modes}{ An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}} or if this results in a non-scalar. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbExistsTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[=dbQuoteIdentifier]{dbQuoteIdentifier()}}: no more quoting is done } For all tables listed by \code{\link[=dbListTables]{dbListTables()}}, \code{dbExistsTable()} returns \code{TRUE}. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_result_get_query.Rd0000644000176200001440000000762314157037563017251 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-get-query.R \docType{data} \name{spec_result_get_query} \alias{spec_result_get_query} \title{spec_result_get_query} \value{ \code{dbGetQuery()} always returns a \link{data.frame} with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. } \description{ spec_result_get_query } \section{Failure modes}{ An error is raised when issuing a query over a closed or invalid connection, if the syntax of the query is invalid, or if the query is not a non-\code{NA} string. If the \code{n} argument is not an atomic whole number greater or equal to -1 or Inf, an error is raised, but a subsequent call to \code{dbGetQuery()} with proper \code{n} argument succeeds. } \section{Additional arguments}{ The following arguments are not part of the \code{dbGetQuery()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{n} (default: -1) \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ A column named \code{row_names} is treated like any other column. 
The \code{n} argument specifies the number of rows to be fetched. If omitted, fetching multi-row queries with one or more columns returns the entire result. A value of \link{Inf} for the \code{n} argument is supported and also returns the full result. If more rows than available are fetched (by passing a too large value for \code{n}), the result is returned in full without warning. If zero rows are requested, the columns of the data frame are still fully typed. Fetching fewer rows than available is permitted, no warning is issued. The \code{param} argument allows passing query parameters, see \code{\link[=dbBind]{dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. 
Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[=dbBind]{dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_result_send_query.Rd0000644000176200001440000000742314157037563017421 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-send-query.R \docType{data} \name{spec_result_send_query} \alias{spec_result_send_query} \title{spec_result_send_query} \value{ \code{dbSendQuery()} returns an S4 object that inherits from \linkS4class{DBIResult}. The result set can be used with \code{\link[=dbFetch]{dbFetch()}} to extract records. Once you have finished using a result, make sure to clear it with \code{\link[=dbClearResult]{dbClearResult()}}. 
} \description{ spec_result_send_query } \section{Failure modes}{ An error is raised when issuing a query over a closed or invalid connection, or if the query is not a non-\code{NA} string. An error is also raised if the syntax of the query is invalid and all query parameters are given (by passing the \code{params} argument) or the \code{immediate} argument is set to \code{TRUE}. } \section{Additional arguments}{ The following arguments are not part of the \code{dbSendQuery()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification}{ No warnings occur under normal conditions. When done, the DBIResult object must be cleared with a call to \code{\link[=dbClearResult]{dbClearResult()}}. Failure to clear the result set leads to a warning when the connection is closed. If the backend supports only one open result set per connection, issuing a second query invalidates an already open result set and raises a warning. The newly opened result set is valid and must be cleared with \code{dbClearResult()}. The \code{param} argument allows passing query parameters, see \code{\link[=dbBind]{dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. 
A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[=dbBind]{dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/DBItest-package.Rd0000644000176200001440000000156514157034343015514 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/DBItest.R \docType{package} \name{DBItest-package} \alias{DBItest} \alias{DBItest-package} \title{DBItest: Testing DBI Backends} \description{ A helper that tests DBI back ends for conformity to the interface. } \details{ The two most important functions are \code{\link[=make_context]{make_context()}} and \code{\link[=test_all]{test_all()}}. 
The former tells the package how to connect to your DBI backend, the latter executes all tests of the test suite. More fine-grained test functions (all with prefix \code{test_}) are available. See the package's vignette for more details. } \seealso{ Useful links: \itemize{ \item \url{https://dbitest.r-dbi.org} \item \url{https://github.com/r-dbi/DBItest} \item Report bugs at \url{https://github.com/r-dbi/DBItest/issues} } } \author{ Kirill Müller } DBItest/man/spec_transaction_with_transaction.Rd0000644000176200001440000000241614157037563021627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-transaction-with-transaction.R \docType{data} \name{spec_transaction_with_transaction} \alias{spec_transaction_with_transaction} \title{spec_transaction_with_transaction} \value{ \code{dbWithTransaction()} returns the value of the executed code. } \description{ spec_transaction_with_transaction } \section{Failure modes}{ Failure to initiate the transaction (e.g., if the connection is closed or invalid of if \code{\link[=dbBegin]{dbBegin()}} has been called already) gives an error. } \section{Specification}{ \code{dbWithTransaction()} initiates a transaction with \code{dbBegin()}, executes the code given in the \code{code} argument, and commits the transaction with \code{\link[=dbCommit]{dbCommit()}}. If the code raises an error, the transaction is instead aborted with \code{\link[=dbRollback]{dbRollback()}}, and the error is propagated. If the code calls \code{dbBreak()}, execution of the code stops and the transaction is silently aborted. All side effects caused by the code (such as the creation of new variables) propagate to the calling environment. 
} \seealso{ Other transaction specifications: \code{\link{spec_transaction_begin_commit_rollback}} } \concept{transaction specifications} DBItest/man/spec_connection_disconnect.Rd0000644000176200001440000000135314157037563020211 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-connection-disconnect.R \docType{data} \name{spec_connection_disconnect} \alias{spec_connection_disconnect} \title{spec_connection_disconnect} \value{ \code{dbDisconnect()} returns \code{TRUE}, invisibly. } \description{ spec_connection_disconnect } \section{Failure modes}{ A warning is issued on garbage collection when a connection has been released without calling \code{dbDisconnect()}, but this cannot be tested automatically. A warning is issued immediately when calling \code{dbDisconnect()} on an already disconnected or invalid connection. } \seealso{ Other connection specifications: \code{\link{spec_get_info}} } \concept{connection specifications} DBItest/man/spec_sql_list_tables.Rd0000644000176200001440000000243414157037563017026 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-list-tables.R \docType{data} \name{spec_sql_list_tables} \alias{spec_sql_list_tables} \title{spec_sql_list_tables} \value{ \code{dbListTables()} returns a character vector that enumerates all tables and views in the database. Tables added with \code{\link[=dbWriteTable]{dbWriteTable()}} are part of the list. As soon a table is removed from the database, it is also removed from the list of database tables. The same applies to temporary tables if supported by the database. The returned names are suitable for quoting with \code{dbQuoteIdentifier()}. } \description{ spec_sql_list_tables } \section{Failure modes}{ An error is raised when calling this method for a closed or invalid connection. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_quote_identifier.Rd0000644000176200001440000000450014157037563020054 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-quote-identifier.R \docType{data} \name{spec_sql_quote_identifier} \alias{spec_sql_quote_identifier} \title{spec_sql_quote_identifier} \value{ \code{dbQuoteIdentifier()} returns an object that can be coerced to \link{character}, of the same length as the input. For an empty character vector this function returns a length-0 object. The names of the input argument are preserved in the output. When passing the returned object again to \code{dbQuoteIdentifier()} as \code{x} argument, it is returned unchanged. Passing objects of class \link{SQL} should also return them unchanged. (For backends it may be most convenient to return \link{SQL} objects to achieve this behavior, but this is not required.) } \description{ spec_sql_quote_identifier } \section{Failure modes}{ An error is raised if the input contains \code{NA}, but not for an empty string. } \section{Specification}{ Calling \code{\link[=dbGetQuery]{dbGetQuery()}} for a query of the format \verb{SELECT 1 AS ...} returns a data frame with the identifier, unquoted, as column name. Quoted identifiers can be used as table and column names in SQL queries, in particular in queries like \verb{SELECT 1 AS ...} and \verb{SELECT * FROM (SELECT 1) ...}. 
The method must use a quoting mechanism that is unambiguously different from the quoting mechanism used for strings, so that a query like \verb{SELECT ... FROM (SELECT 1 AS ...)} throws an error if the column names do not match. The method can quote column names that contain special characters such as a space, a dot, a comma, or quotes used to mark strings or identifiers, if the database supports this. In any case, checking the validity of the identifier should be performed only when executing a query, and not by \code{dbQuoteIdentifier()}. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_quote_string.Rd0000644000176200001440000000415414157037563017245 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-quote-string.R \docType{data} \name{spec_sql_quote_string} \alias{spec_sql_quote_string} \title{spec_sql_quote_string} \value{ \code{dbQuoteString()} returns an object that can be coerced to \link{character}, of the same length as the input. For an empty character vector this function returns a length-0 object. When passing the returned object again to \code{dbQuoteString()} as \code{x} argument, it is returned unchanged. Passing objects of class \link{SQL} should also return them unchanged. (For backends it may be most convenient to return \link{SQL} objects to achieve this behavior, but this is not required.) 
} \description{ spec_sql_quote_string } \section{Specification}{ The returned expression can be used in a \verb{SELECT ...} query, and for any scalar character \code{x} the value of \code{dbGetQuery(paste0("SELECT ", dbQuoteString(x)))[[1]]} must be identical to \code{x}, even if \code{x} contains spaces, tabs, quotes (single or double), backticks, or newlines (in any combination) or is itself the result of a \code{dbQuoteString()} call coerced back to character (even repeatedly). If \code{x} is \code{NA}, the result must merely satisfy \code{\link[=is.na]{is.na()}}. The strings \code{"NA"} or \code{"NULL"} are not treated specially. \code{NA} should be translated to an unquoted SQL \code{NULL}, so that the query \verb{SELECT * FROM (SELECT 1) a WHERE ... IS NULL} returns one row. } \section{Failure modes}{ Passing a numeric, integer, logical, or raw vector, or a list for the \code{x} argument raises an error. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/context.Rd0000644000176200001440000000364114157034343014306 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/context.R \name{make_context} \alias{make_context} \alias{set_default_context} \alias{get_default_context} \title{Test contexts} \usage{ make_context( drv, connect_args = NULL, set_as_default = TRUE, tweaks = NULL, name = NULL, default_skip = NULL ) set_default_context(ctx) get_default_context() } \arguments{ \item{drv}{\verb{[DBIConnector]}\cr An object of class 
\linkS4class{DBIConnector} that describes how to connect to the database.} \item{connect_args}{\verb{[named list]}\cr Deprecated.} \item{set_as_default}{\verb{[logical(1)]}\cr Should the created context be set as default context?} \item{tweaks}{\verb{[DBItest_tweaks]}\cr Tweaks as constructed by the \code{\link[=tweaks]{tweaks()}} function.} \item{name}{\verb{[character]}\cr An optional name of the context which will be used in test messages.} \item{default_skip}{\verb{[character]}\cr Default value of \code{skip} argument to \code{\link[=test_all]{test_all()}} and other testing functions.} \item{ctx}{\verb{[DBItest_context]}\cr A test context.} } \value{ \verb{[DBItest_context]}\cr A test context, for \code{set_default_context} the previous default context (invisibly) or \code{NULL}. } \description{ Create a test context, set and query the default context. } \examples{ make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ), default_skip = c("roundtrip_date", "roundtrip_timestamp") ) } DBItest/man/test_stress.Rd0000644000176200001440000000155214157034343015203 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-stress.R \name{test_stress} \alias{test_stress} \title{Stress tests} \usage{ test_stress(skip = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Stress tests } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_transaction}()} } \concept{tests} \keyword{internal} DBItest/man/spec_get_info.Rd0000644000176200001440000000512714157037563015436 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-get-info.R, % R/spec-connection-get-info.R, R/spec-meta-get-info-result.R \docType{data} \name{spec_get_info} \alias{spec_get_info} \alias{spec_driver_get_info} \alias{spec_connection_get_info} \alias{spec_meta_get_info_result} \title{spec_driver_get_info} \value{ For objects of class \linkS4class{DBIDriver}, \code{dbGetInfo()} returns a named list that contains at least the following components: \itemize{ \item \code{driver.version}: the package version of the DBI backend, \item \code{client.version}: the version of the DBMS client library. } For objects of class \linkS4class{DBIConnection}, \code{dbGetInfo()} returns a named list that contains at least the following components: \itemize{ \item \code{db.version}: version of the database server, \item \code{dbname}: database name, \item \code{username}: username to connect to the database, \item \code{host}: hostname of the database server, \item \code{port}: port on the database server. It must not contain a \code{password} component. Components that are not applicable should be set to \code{NA}. 
} For objects of class \linkS4class{DBIResult}, \code{dbGetInfo()} returns a named list that contains at least the following components: \itemize{ \item \code{statement}: the statement used with \code{\link[=dbSendQuery]{dbSendQuery()}} or \code{\link[=dbExecute]{dbExecute()}}, as returned by \code{\link[=dbGetStatement]{dbGetStatement()}}, \item \code{row.count}: the number of rows fetched so far (for queries), as returned by \code{\link[=dbGetRowCount]{dbGetRowCount()}}, \item \code{rows.affected}: the number of rows affected (for statements), as returned by \code{\link[=dbGetRowsAffected]{dbGetRowsAffected()}} \item \code{has.completed}: a logical that indicates if the query or statement has completed, as returned by \code{\link[=dbHasCompleted]{dbHasCompleted()}}. } } \description{ spec_driver_get_info spec_connection_get_info spec_meta_get_info_result } \seealso{ Other driver specifications: \code{\link{spec_driver_connect}}, \code{\link{spec_driver_constructor}}, \code{\link{spec_driver_data_type}} Other connection specifications: \code{\link{spec_connection_disconnect}} Other meta specifications: \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{connection specifications} \concept{driver specifications} \concept{meta specifications} DBItest/man/spec_meta_column_info.Rd0000644000176200001440000000304614157037563017160 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-column-info.R \docType{data} \name{spec_meta_column_info} \alias{spec_meta_column_info} \title{spec_meta_column_info} \value{ \code{dbColumnInfo()} returns a data frame with at least two columns \code{"name"} and \code{"type"} (in that order) (and optional columns that start with a dot).
The \code{"name"} and \code{"type"} columns contain the names and types of the R columns of the data frame that is returned from \code{\link[=dbFetch]{dbFetch()}}. The \code{"type"} column is of type \code{character} and only for information. Do not compute on the \code{"type"} column, instead use \code{dbFetch(res, n = 0)} to create a zero-row data frame initialized with the correct data types. } \description{ spec_meta_column_info } \section{Failure modes}{ An attempt to query columns for a closed result set raises an error. } \section{Specification}{ A column named \code{row_names} is treated like any other column. The column names are always consistent with the data returned by \code{dbFetch()}. If the query returns unnamed columns, non-empty and non-\code{NA} names are assigned. Column names that correspond to SQL or R keywords are left unchanged. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_meta_get_row_count.Rd0000644000176200001440000000262514157037563017530 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-get-row-count.R \docType{data} \name{spec_meta_get_row_count} \alias{spec_meta_get_row_count} \title{spec_meta_get_row_count} \value{ \code{dbGetRowCount()} returns a scalar number (integer or numeric), the number of rows fetched so far. After calling \code{\link[=dbSendQuery]{dbSendQuery()}}, the row count is initially zero. After a call to \code{\link[=dbFetch]{dbFetch()}} without limit, the row count matches the total number of rows returned. Fetching a limited number of rows increases the number of rows by the number of rows returned, even if fetching past the end of the result set. 
For queries with an empty result set, zero is returned even after fetching. For data manipulation statements issued with \code{\link[=dbSendStatement]{dbSendStatement()}}, zero is returned before and after calling \code{dbFetch()}. } \description{ spec_meta_get_row_count } \section{Failure modes}{ Attempting to get the row count for a result set cleared with \code{\link[=dbClearResult]{dbClearResult()}} gives an error. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/test_getting_started.Rd0000644000176200001440000000227414157034343017051 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-getting-started.R \name{test_getting_started} \alias{test_getting_started} \title{Getting started with testing} \usage{ test_getting_started(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Tests very basic features of a DBI driver package, to support testing and test-first development right from the start. 
} \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/DESCRIPTION0000644000176200001440000000635414157055565013303 0ustar liggesusersPackage: DBItest Title: Testing DBI Backends Version: 1.7.2 Date: 2021-12-17 Authors@R: c(person(given = "Kirill", family = "M\u00fcller", role = c("aut", "cre"), email = "krlmlr+r@mailbox.org", comment = c(ORCID = "0000-0002-1416-3412")), person(given = "RStudio", role = "cph"), person(given = "R Consortium", role = "fnd")) Description: A helper that tests DBI back ends for conformity to the interface. License: LGPL (>= 2.1) URL: https://dbitest.r-dbi.org, https://github.com/r-dbi/DBItest BugReports: https://github.com/r-dbi/DBItest/issues Depends: R (>= 3.2.0) Imports: blob (>= 1.2.0), callr, DBI (>= 1.1.1), desc, hms (>= 0.5.0), lubridate, methods, palmerpenguins, R6, rlang (>= 0.2.0), testthat (>= 2.0.0), utils, withr, vctrs Suggests: clipr, dblog (>= 0.0.0.9008), debugme, devtools, knitr, lintr, rmarkdown, RSQLite VignetteBuilder: knitr Additional_repositories: https://r-dbi.r-universe.dev Encoding: UTF-8 KeepSource: true RoxygenNote: 7.1.2 Collate: 'DBItest.R' 'context.R' 'expectations.R' 'import-dbi.R' 'import-testthat.R' 'run.R' 's4.R' 'spec-getting-started.R' 'spec-compliance-methods.R' 'spec-driver-constructor.R' 'spec-driver-data-type.R' 'spec-connection-data-type.R' 'spec-result-create-table-with-data-type.R' 'spec-driver-connect.R' 'spec-connection-disconnect.R' 'spec-result-send-query.R' 'spec-result-fetch.R' 'spec-result-roundtrip.R' 'spec-result-clear-result.R' 'spec-result-get-query.R' 'spec-result-send-statement.R' 'spec-result-execute.R' 'spec-sql-quote-string.R' 'spec-sql-quote-literal.R' 'spec-sql-quote-identifier.R' 'spec-sql-unquote-identifier.R' 'spec-sql-read-table.R' 
'spec-sql-create-table.R' 'spec-sql-append-table.R' 'spec-sql-write-table.R' 'spec-sql-list-tables.R' 'spec-sql-exists-table.R' 'spec-sql-remove-table.R' 'spec-sql-list-objects.R' 'spec-meta-bind-runner.R' 'spec-meta-bind-tester-extra.R' 'spec-meta-bind.R' 'spec-meta-bind-.R' 'spec-meta-is-valid.R' 'spec-meta-has-completed.R' 'spec-meta-get-statement.R' 'spec-meta-get-row-count.R' 'spec-meta-get-rows-affected.R' 'spec-transaction-begin-commit-rollback.R' 'spec-transaction-with-transaction.R' 'spec-driver-get-info.R' 'spec-connection-get-info.R' 'spec-sql-list-fields.R' 'spec-meta-column-info.R' 'spec-meta-get-info-result.R' 'spec-driver.R' 'spec-connection.R' 'spec-result.R' 'spec-sql.R' 'spec-meta.R' 'spec-transaction.R' 'spec-compliance.R' 'spec-stress-connection.R' 'spec-stress.R' 'spec-all.R' 'spec-.R' 'test-all.R' 'test-getting-started.R' 'test-driver.R' 'test-connection.R' 'test-result.R' 'test-sql.R' 'test-meta.R' 'test-transaction.R' 'test-compliance.R' 'test-stress.R' 'tweaks.R' 'utf8.R' 'utils.R' 'zzz.R' NeedsCompilation: no Packaged: 2021-12-17 09:16:15 UTC; kirill Author: Kirill Müller [aut, cre] (), RStudio [cph], R Consortium [fnd] Maintainer: Kirill Müller Repository: CRAN Date/Publication: 2021-12-17 09:42:13 UTC DBItest/build/0000755000176200001440000000000014157052537012660 5ustar liggesusersDBItest/build/vignette.rds0000644000176200001440000000032014157052537015212 0ustar liggesusers‹‹ąb```b`add`b2™… 1# 'ęvqņ,I-.Ń ŹMA“ Šgę„+•($%&g§ę„£©įiĻ(ÉĶA“ć„ t†0X¢X˜²ę%榢›Ģī’ZµšvżŒ’Ń“px§V–ēĮō ØaƒŖaqĖĢI…Ł’Yē0øøA™ŒAčnĄ0ÅżœEłåz0?š‚½Hüt&ē$£{”+%±$Q/­ØänR¤S¶DBItest/tests/0000755000176200001440000000000014157034343012716 5ustar liggesusersDBItest/tests/testthat/0000755000176200001440000000000014157055565014567 5ustar liggesusersDBItest/tests/testthat/test-tweaks.R0000644000176200001440000000055414157034343017160 0ustar liggesuserscontext("tweaks") test_that("tweaks work as expected", { expect_true(names(formals(tweaks))[[1]] == "...") 
expect_warning(tweaks(`_oooops` = 42, `_darn` = -1), "_oooops, _darn") expect_warning(tweaks(), NA) expect_warning(tweaks(5), "named") expect_warning(tweaks(5, `_ooops` = 42), "named") expect_warning(tweaks(constructor_name = "constr"), NA) }) DBItest/tests/testthat/test-consistency.R0000644000176200001440000000231714157034343020222 0ustar liggesuserscontext("consistency") test_that("no unnamed specs", { tests <- spec_all[!vapply(spec_all, is.null, logical(1L))] vicinity <- NULL if (any(names(tests) == "")) { vicinity <- sort(unique(unlist( lapply(which(names(tests) == ""), "+", -1:1) ))) vicinity <- vicinity[names(tests)[vicinity] != ""] } expect_null(vicinity) }) test_that("no duplicate spec names expect known exceptions", { all_names <- names(spec_all) all_names <- all_names[!(all_names %in% c( "create_temporary_table", "create_table_visible_in_other_connection", "list_tables", "exists_table", "temporary_table", "list_objects", "table_visible_in_other_connection", "begin_write_disconnect", "begin_write_commit" ))] dupe_names <- unique(all_names[duplicated(all_names)]) expect_equal(dupe_names, rep("", length(dupe_names))) }) test_that("all specs used", { env <- asNamespace("DBItest") defined_spec_names <- ls(env, pattern = "^spec_") defined_specs <- mget(defined_spec_names, env) defined_spec_names <- unlist(sapply(defined_specs, names), use.names = FALSE) new_names <- setdiff(defined_spec_names, names(spec_all)) expect_equal(new_names, rep("", length(new_names))) }) DBItest/tests/testthat/test-lint.R0000644000176200001440000000047514157034343016632 0ustar liggesuserscontext("lint") test_that("lintr is happy", { skip_on_cran() expect_false("package:DBI" %in% search()) require(DBI) on.exit(detach(), add = TRUE) expect_true("package:DBI" %in% search()) # lintr::expect_lint_free() detach() on.exit(NULL, add = FALSE) expect_false("package:DBI" %in% search()) }) DBItest/tests/testthat/test-context.R0000644000176200001440000000014314157034343017340 0ustar 
liggesuserscontext("context") test_that("default context is NULL", { expect_null(get_default_context()) }) DBItest/tests/testthat.R0000644000176200001440000000007214157034343014700 0ustar liggesuserslibrary(testthat) library(DBItest) test_check("DBItest") DBItest/vignettes/0000755000176200001440000000000014157052537013571 5ustar liggesusersDBItest/vignettes/DBItest.Rmd0000644000176200001440000001622614157034343015535 0ustar liggesusers--- title: "Testing DBI backends" author: "Kirill Müller" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Testing DBI backends} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r setup, include = FALSE} knitr::opts_chunk$set(error = (getRversion() < "3.5")) ``` This document shows how to use the DBItest package when implementing a new DBI backend or when applying it to an existing backend. The DBItest package provides a large collection of automated tests. ## Testing a new backend The test cases in the DBItest package are structured very similarly to the sections in the "backend" vignette: ```r vignette("backend", package = "DBI") ``` Like the "backend" vignette, this vignette assumes that you are implementing the `RKazam` package that has a `Kazam()` function that creates a new `DBIDriver` instance for connecting to a "Kazam" database. You can add the tests in the DBItest package incrementally, as you proceed with implementing the various parts of the DBI. The DBItest package builds upon the testthat package. To enable it, run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_getting_started() ``` Now test your package with `devtools::test()`. 
If you followed at least the "Getting started" section of the DBI "backend" vignette, all tests should succeed. By adding the corresponding test function to your `tests/test-DBItest.R` file *before* implementing a section, you get immediate feedback which functionality of this section still needs to be implemented by running `devtools::test()` again. Therefore, proceed by appending the following to `tests/test-DBItest.R`, to include a test case for the forthcoming section: ```r DBItest::test_driver() ``` Again, all tests should succeed when you are done with the "Driver" section. Add the call to the next tester function, implement the following section until all tests succeed, and so forth. In this scenario, you are usually interested only in the first error the test suite finds. The `StopReporter` of `testthat` is most helpful here, activate it by passing `reporter = "stop"` to `devtools::test()`. Alternatively, call the relevant `DBItest::test_()` function directly. The tests are documented with the corresponding functions: For instance, `?test_driver` shows a coarse description of all tests for the "Driver" test case. Test failures will include the name of the test that is failing; in this case, investigating the documentation or the source code of the DBItest package will usually lead to the cause of the error. Not all tests can be satisfied: For example, there is one test that tests that `logical` variables survive a write-read roundtrip to the database, whereas another test tests that `logical` variables are converted to `integer` in such a case. Tests can be skipped by adding regular expressions for the tests to skip as character vector to the call, as in the following[^termnull]: [^termnull]: The terminating `NULL` allows appending new lines to the end by copy-pasting an existing line, without having to take care of the terminating comma. ```r DBItest::test_driver(skip = c( "data_type" # Reason 1... "constructor.*", # Reason 2... 
NULL )) ``` Some other reasons to skip tests are: - your database does not support a feature - you want to postpone or avoid the implementation of a feature - the test takes too long to run. ## Testing an existing backend For an existing backends, simply enabling all tests may be the quickest way to get started. Run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_all() ``` The notes about "Kazam" and skipping tests from the previous section apply here as well. The `test_all()` function simply calls all test cases. ## External testing DBItest is currently geared towards usage as part of a package's test suite. With some effort it is possible to test a database backend against a custom database. This can help verify that your database installation gives expected results when accessed with DBI with specific connection arguments. The example below shows how to run tests with the RSQLite backend. ### Preparation First, we need to define a test context. It contains: - a connector that describes how to establish the database connection, see ``?DBI::`DBIConnector-class` `` for details, - tweaks, see `?tweaks`, - tests skipped by default, as a character vector. Database backends that use DBItest for testing usually have a file `test/testthat/helper-DBItest.R` or `test/testthat/test-DBItest.R` where a call to `make_context()` can be found. The help for `make_context()` already contains an example that works for RSQLite. Adapt it to your needs. The `make_context()` function must be called before any tests can run. 
```{r make-context} library(DBItest) tweaks <- tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ) default_skip <- c("roundtrip_date", "roundtrip_timestamp") invisible(make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks, default_skip = default_skip )) ``` ### Testing Use `test_all()` to run all tests, and `test_some()` to run a specific test that failed previously. The `test_*` functions need to be run with a testthat reporter to avoid stopping at the first error or warning. For interactive use, the "progress" reporter gives good results. In the example below, the "location" and "stop" reporters are combined. Review `?testthat::Reporter` for a list of reporters. ```{r simple, error = (getRversion() < "3.5") || !rlang::is_installed("dblog")} DBItest::test_some("get_query_atomic") ``` DBItest relies heavily on metaprogramming. Unfortunately, this means that a failing test may give no indication of the reason for the failure. The `test_some()` function now by default integrates the new experimental [dblog package](https://github.com/r-dbi/dblog) package. It prints the DBI code that is executed as the tests are run, as seen above. Another way to scout for the reason of the problem is to review the sources of DBItest and relate the test name (that is printed with each failure) with the human-readable specification embedded with the test code. 
```{r location, error = (getRversion() < "3.5") || !rlang::is_installed("dblog")} testthat::with_reporter( c("location", "fail"), DBItest::test_some("get_query_atomic") ) ``` DBItest/R/0000755000176200001440000000000014157037563011764 5ustar liggesusersDBItest/R/test-driver.R0000644000176200001440000000066614157034343014360 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_driver()]: #' Test the "Driver" class NULL #' Test the "Driver" class #' #' @inheritParams test_all #' @include test-getting-started.R #' @family tests #' @importFrom withr with_temp_libpaths #' @export test_driver <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Driver" run_tests(ctx, spec_driver, skip, run_only, test_suite) } DBItest/R/spec-stress.R0000644000176200001440000000007514157034343014355 0ustar liggesusers#' @format NULL spec_stress <- c( spec_stress_connection ) DBItest/R/import-testthat.R0000644000176200001440000000041714157034343015252 0ustar liggesusers#' @import testthat #' @importFrom rlang quo enquo enquos expr enexpr eval_tidy list2 has_length := #' @importFrom rlang abort is_interactive NULL #' @importFrom methods findMethod getClasses getClass extends #' @importFrom stats setNames #' @importFrom utils head NULL DBItest/R/test-connection.R0000644000176200001440000000073714157034343015223 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_connection()]: #' Test the "Connection" class NULL #' Test the "Connection" class #' #' @inheritParams test_all #' @include test-driver.R #' @family tests #' @importFrom withr with_temp_libpaths #' @importFrom methods is #' @export test_connection <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Connection" run_tests(ctx, spec_connection, skip, run_only, test_suite) } DBItest/R/test-getting-started.R0000644000176200001440000000106014157034343016157 0ustar liggesusers#' @name test_all #' @aliases NULL #' 
@section Tests: #' [test_getting_started()]: #' Getting started with testing NULL #' Getting started with testing #' #' Tests very basic features of a DBI driver package, to support testing #' and test-first development right from the start. #' #' @inheritParams test_all #' @include test-all.R #' @family tests #' @export test_getting_started <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Getting started" run_tests(ctx, spec_getting_started, skip, run_only, test_suite) } DBItest/R/spec-result-get-query.R0000644000176200001440000001741414157037563016304 0ustar liggesusers#' spec_result_get_query #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_get_query <- list( get_query_formals = function() { # expect_equal(names(formals(dbGetQuery)), c("conn", "statement", "...")) }, #' @return #' `dbGetQuery()` always returns a [data.frame] #' with as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value get_query_atomic = function(con) { query <- trivial_query() rows <- check_df(dbGetQuery(con, query)) expect_equal(rows, data.frame(a = 1.5)) }, #' or has one get_query_one_row = function(con) { query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, result) }, #' or zero rows. get_query_zero_rows = function(con) { # Not all SQL dialects seem to support the query used here. 
query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" rows <- check_df(dbGetQuery(con, query)) expect_identical(names(rows), letters[1:3]) expect_identical(dim(rows), c(0L, 3L)) }, #' #' @section Failure modes: #' An error is raised when issuing a query over a closed get_query_closed_connection = function(ctx, closed_con) { expect_error(dbGetQuery(closed_con, trivial_query())) }, #' or invalid connection, get_query_invalid_connection = function(ctx, invalid_con) { expect_error(dbGetQuery(invalid_con, trivial_query())) }, #' if the syntax of the query is invalid, get_query_syntax_error = function(con) { expect_error(dbGetQuery(con, "SELLECT")) }, #' or if the query is not a non-`NA` string. get_query_non_string = function(con) { expect_error(dbGetQuery(con, character())) expect_error(dbGetQuery(con, letters)) expect_error(dbGetQuery(con, NA_character_)) }, #' If the `n` argument is not an atomic whole number #' greater or equal to -1 or Inf, an error is raised, get_query_n_bad = function(con) { query <- trivial_query() expect_error(dbGetQuery(con, query, n = -2)) expect_error(dbGetQuery(con, query, n = 1.5)) expect_error(dbGetQuery(con, query, n = integer())) expect_error(dbGetQuery(con, query, n = 1:3)) expect_error(dbGetQuery(con, query, n = NA_integer_)) }, #' but a subsequent call to `dbGetQuery()` with proper `n` argument succeeds. get_query_good_after_bad_n = function(con) { query <- trivial_query() expect_error(dbGetQuery(con, query, n = NA_integer_)) rows <- check_df(dbGetQuery(con, query)) expect_equal(rows, data.frame(a = 1.5)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbGetQuery()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `n` (default: -1) #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
#' @section Specification: #' #' A column named `row_names` is treated like any other column. get_query_row_names = function(con) { query <- trivial_query(column = "row_names") result <- trivial_df(column = "row_names") rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, result) expect_identical(.row_names_info(rows), -1L) }, #' #' The `n` argument specifies the number of rows to be fetched. #' If omitted, fetching multi-row queries with one get_query_multi_row_single_column = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, result) }, #' or more columns returns the entire result. get_query_multi_row_multi_column = function(ctx, con) { query <- union( .ctx = ctx, paste("SELECT", 1:5 + 0.5, "AS a,", 4:0 + 0.5, "AS b"), .order_by = "a" ) rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, data.frame(a = 1:5 + 0.5, b = 4:0 + 0.5)) }, #' A value of [Inf] for the `n` argument is supported #' and also returns the full result. get_query_n_multi_row_inf = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) rows <- check_df(dbGetQuery(con, query, n = Inf)) expect_identical(rows, result) }, #' If more rows than available are fetched (by passing a too large value for #' `n`), the result is returned in full without warning. get_query_n_more_rows = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) rows <- check_df(dbGetQuery(con, query, n = 5L)) expect_identical(rows, result) }, #' If zero rows are requested, the columns of the data frame are still fully #' typed. get_query_n_zero_rows = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(0) rows <- check_df(dbGetQuery(con, query, n = 0L)) expect_identical(rows, result) }, #' Fetching fewer rows than available is permitted, #' no warning is issued. 
get_query_n_incomplete = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(2) rows <- check_df(dbGetQuery(con, query, n = 2L)) expect_identical(rows, result) }, #' #' The `param` argument allows passing query parameters, see [dbBind()] for details. get_query_params = function(ctx, con) { placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { placeholder <- placeholder_fun(1) query <- paste0("SELECT ", placeholder, " + 1.0 AS a") values <- trivial_values(3) - 1 params <- stats::setNames(list(values), names(placeholder)) ret <- dbGetQuery(con, query, params = params) expect_equal(ret, trivial_df(3), info = placeholder) } }, #' @section Specification for the `immediate` argument: #' #' The `immediate` argument supports distinguishing between "direct" #' and "prepared" APIs offered by many database drivers. #' Passing `immediate = TRUE` leads to immediate execution of the #' query or statement, via the "direct" API (if supported by the driver). #' The default `NULL` means that the backend should choose whatever API #' makes the most sense for the database, and (if relevant) tries the #' other API if the first attempt fails. A successful second attempt #' should result in a message that suggests passing the correct #' `immediate` argument. #' Examples for possible behaviors: #' 1. DBI backend defaults to `immediate = TRUE` internally #' 1. A query without parameters is passed: query is executed #' 1. A query with parameters is passed: #' 1. `params` not given: rejected immediately by the database #' because of a syntax error in the query, the backend tries #' `immediate = FALSE` (and gives a message) #' 1. `params` given: query is executed using `immediate = FALSE` #' 1. DBI backend defaults to `immediate = FALSE` internally #' 1. A query without parameters is passed: #' 1. simple query: query is executed #' 1. 
"special" query (such as setting a config options): fails, #' the backend tries `immediate = TRUE` (and gives a message) #' 1. A query with parameters is passed: #' 1. `params` not given: waiting for parameters via [dbBind()] #' 1. `params` given: query is executed get_query_immediate = function(con, table_name) { res <- expect_visible(dbGetQuery(con, trivial_query(), immediate = TRUE)) expect_s3_class(res, "data.frame") }, # NULL ) DBItest/R/spec-meta-get-row-count.R0000644000176200001440000000514014157037563016475 0ustar liggesusers#' spec_meta_get_row_count #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_get_row_count <- list( get_row_count_formals = function() { # expect_equal(names(formals(dbGetRowCount)), c("res", "...")) }, #' @return #' `dbGetRowCount()` returns a scalar number (integer or numeric), #' the number of rows fetched so far. row_count_query = function(con) { query <- trivial_query() #' After calling [dbSendQuery()], res <- local_result(dbSendQuery(con, query)) rc <- dbGetRowCount(res) #' the row count is initially zero. expect_equal(rc, 0L) #' After a call to [dbFetch()] without limit, check_df(dbFetch(res)) rc <- dbGetRowCount(res) #' the row count matches the total number of rows returned. expect_equal(rc, 1L) }, # row_count_query_limited = function(ctx, con) { query <- union(.ctx = ctx, trivial_query(), "SELECT 2", "SELECT 3") res <- local_result(dbSendQuery(con, query)) rc1 <- dbGetRowCount(res) expect_equal(rc1, 0L) #' Fetching a limited number of rows check_df(dbFetch(res, 2L)) #' increases the number of rows by the number of rows returned, rc2 <- dbGetRowCount(res) expect_equal(rc2, 2L) #' even if fetching past the end of the result set. 
check_df(dbFetch(res, 2L)) rc3 <- dbGetRowCount(res) expect_equal(rc3, 3L) }, # row_count_query_empty = function(ctx, con) { #' For queries with an empty result set, query <- union( .ctx = ctx, "SELECT * FROM (SELECT 1 as a) a WHERE (0 = 1)" ) res <- local_result(dbSendQuery(con, query)) rc <- dbGetRowCount(res) #' zero is returned expect_equal(rc, 0L) check_df(dbFetch(res)) rc <- dbGetRowCount(res) #' even after fetching. expect_equal(rc, 0L) }, # row_count_statement = function(con, table_name) { query <- paste0("CREATE TABLE ", table_name, " (a integer)") #' For data manipulation statements issued with #' [dbSendStatement()], res <- local_result(dbSendStatement(con, query)) rc <- dbGetRowCount(res) #' zero is returned before expect_equal(rc, 0L) expect_warning(check_df(dbFetch(res))) rc <- dbGetRowCount(res) #' and after calling `dbFetch()`. expect_equal(rc, 0L) }, #' #' @section Failure modes: get_row_count_error = function(con) { res <- dbSendQuery(con, trivial_query()) dbClearResult(res) #' Attempting to get the row count for a result set cleared with #' [dbClearResult()] gives an error. expect_error(dbGetRowCount(res)) }, # NULL ) DBItest/R/spec-sql-quote-literal.R0000644000176200001440000001203214157037563016421 0ustar liggesusers#' spec_sql_quote_literal #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_quote_literal <- list( quote_literal_formals = function() { # expect_equal(names(formals(dbQuoteLiteral)), c("conn", "x", "...")) }, #' @return quote_literal_return = function(con) { #' `dbQuoteLiteral()` returns an object that can be coerced to [character], simple <- "simple" simple_out <- dbQuoteLiteral(con, simple) expect_error(as.character(simple_out), NA) expect_is(as.character(simple_out), "character") expect_equal(length(simple_out), 1L) }, # quote_literal_vectorized = function(con) { #' of the same length as the input. 
letters_out <- dbQuoteLiteral(con, letters) expect_equal(length(letters_out), length(letters)) }, # quote_literal_empty = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } #' For an empty #' integer, expect_equal(length(dbQuoteLiteral(con, integer())), 0L) #' numeric, expect_equal(length(dbQuoteLiteral(con, numeric())), 0L) #' character, expect_equal(length(dbQuoteLiteral(con, character())), 0L) #' logical, expect_equal(length(dbQuoteLiteral(con, logical())), 0L) #' date, expect_equal(length(dbQuoteLiteral(con, Sys.Date()[0])), 0L) #' time, expect_equal(length(dbQuoteLiteral(con, Sys.time()[0])), 0L) #' or blob vector, expect_equal(length(dbQuoteLiteral(con, list())), 0L) #' this function returns a length-0 object. }, # quote_literal_double = function(con) { simple <- "simple" simple_out <- dbQuoteLiteral(con, simple) letters_out <- dbQuoteLiteral(con, letters) empty <- character() empty_out <- dbQuoteLiteral(con, character()) #' #' When passing the returned object again to `dbQuoteLiteral()` #' as `x` #' argument, it is returned unchanged. expect_identical(dbQuoteLiteral(con, simple_out), simple_out) expect_identical(dbQuoteLiteral(con, letters_out), letters_out) expect_identical(dbQuoteLiteral(con, empty_out), empty_out) #' Passing objects of class [SQL] should also return them unchanged. expect_identical(dbQuoteLiteral(con, SQL(simple)), SQL(simple)) expect_identical(dbQuoteLiteral(con, SQL(letters)), SQL(letters)) expect_identical(dbQuoteLiteral(con, SQL(empty)), SQL(empty)) #' (For backends it may be most convenient to return [SQL] objects #' to achieve this behavior, but this is not required.) 
}, #' @section Specification: quote_literal_roundtrip = function(ctx, con) { do_test_literal <- function(x) { #' The returned expression can be used in a `SELECT ...` query, literals <- vapply(x, dbQuoteLiteral, conn = con, character(1)) query <- paste0("SELECT ", paste(literals, collapse = ", ")) #' and the value of #' \code{dbGetQuery(paste0("SELECT ", dbQuoteLiteral(x)))[[1]]} #' must be equal to `x` x_out <- check_df(dbGetQuery(con, query)) expect_equal(nrow(x_out), 1L) is_logical <- vapply(x, is.logical, FUN.VALUE = logical(1)) x_out[is_logical] <- lapply(x_out[is_logical], as.logical) is_numeric <- vapply(x, is.numeric, FUN.VALUE = logical(1)) x_out[is_numeric] <- lapply(x_out[is_numeric], as.numeric) expect_equal(as.list(unname(x_out)), x) } #' for any scalar test_literals <- list( #' integer, 1L, #' numeric, 2.5, #' string, "string", #' and logical. TRUE ) do_test_literal(test_literals) }, # quote_literal_na = function(ctx, con) { null <- dbQuoteLiteral(con, NA_character_) quoted_null <- dbQuoteLiteral(con, as.character(null)) na <- dbQuoteLiteral(con, "NA") quoted_na <- dbQuoteLiteral(con, as.character(na)) query <- paste0( "SELECT ", null, " AS null_return,", na, " AS na_return,", quoted_null, " AS quoted_null,", quoted_na, " AS quoted_na" ) #' If `x` is `NA`, the result must merely satisfy [is.na()]. rows <- check_df(dbGetQuery(con, query)) expect_true(is.na(rows$null_return)) #' The literals `"NA"` or `"NULL"` are not treated specially. expect_identical(rows$na_return, "NA") expect_identical(rows$quoted_null, as.character(null)) expect_identical(rows$quoted_na, as.character(na)) }, # quote_literal_na_is_null = function(ctx, con) { #' #' `NA` should be translated to an unquoted SQL `NULL`, null <- dbQuoteLiteral(con, NA_character_) #' so that the query `SELECT * FROM (SELECT 1) a WHERE ... IS NULL` rows <- check_df(dbGetQuery(con, paste0("SELECT * FROM (SELECT 1) a WHERE ", null, " IS NULL"))) #' returns one row. 
expect_equal(nrow(rows), 1L) }, #' #' @section Failure modes: quote_literal_error = function(ctx, con) { #' #' Passing a list expect_error(dbQuoteString(con, as.list(1:3))) #' for the `x` argument raises an error. }, # NULL ) DBItest/R/spec-compliance-methods.R0000644000176200001440000001002414157037563016607 0ustar liggesusers#' spec_compliance_methods #' @family compliance specifications #' @usage NULL #' @format NULL #' @importFrom callr r #' @keywords NULL #' @section DBI classes and methods: spec_compliance_methods <- list( #' A backend defines three classes, compliance = function(ctx) { #' which are subclasses of expect_identical( names(key_methods), c( #' [DBIDriver-class], "Driver", #' [DBIConnection-class], "Connection", #' and [DBIResult-class]. "Result" ) ) pkg <- package_name(ctx) where <- asNamespace(pkg) sapply(names(key_methods), function(name) { dbi_class <- paste0("DBI", name) classes <- Filter(function(class) { extends(class, dbi_class) && getClass(class)@virtual == FALSE }, getClasses(where)) expect_gte(length(classes), 1) class <- classes[[1]] #' The backend provides implementation for all methods #' of these base classes #' that are defined but not implemented by DBI. mapply(function(method, args) { expect_has_class_method(method, class, args, where) }, names(key_methods[[name]]), key_methods[[name]]) }) # }, #' All methods defined in \pkg{DBI} are reexported (so that the package can #' be used without having to attach \pkg{DBI}), reexport = function(ctx) { pkg <- package_name(ctx) where <- asNamespace(pkg) dbi <- asNamespace("DBI") dbi_generics <- grep("^[.]__T__db", getNamespaceExports(dbi), value = TRUE) . <- gsub("^[.]__T__(.*):DBI$", "\\1", dbi_generics) . <- setdiff(., c( "dbDriver", "dbUnloadDriver", "dbListConnections", "dbListResults", "dbSetDataMappings", "dbGetException", "dbCallProc", "dbGetConnectArgs" )) . <- c(., "Id") dbi_names <- . # Suppressing warning "... 
may not be available when loading" exported_names <- suppressWarnings(callr::r( function(pkg) { tryCatch( getNamespaceExports(getNamespace(pkg)), error = function(e) character() ) }, args = list(pkg = pkg) )) # Guard against scenarios where package is not installed if (length(exported_names) == 0) { skip("reexport: package must be installed for this test") } missing <- setdiff(dbi_names, exported_names) expect_equal(paste(missing, collapse = ", "), "") }, #' and have an ellipsis `...` in their formals for extensibility. ellipsis = function(ctx) { pkg <- package_name(ctx) where <- asNamespace(pkg) methods <- s4_methods(where, function(x) x == "DBI") methods <- methods[grep("^db", names(methods))] Map(expect_ellipsis_in_formals, methods, names(methods)) }, # NULL ) # Helpers ----------------------------------------------------------------- #' @importFrom methods hasMethod expect_has_class_method <- function(name, class, args, driver_package) { full_args <- c(class, args) eval(bquote( expect_true(hasMethod(.(name), .(full_args), driver_package)) )) } expect_ellipsis_in_formals <- function(method, name) { sym <- as.name(name) eval(bquote({ .(sym) <- method expect_true("..." 
%in% s4_real_argument_names(.(sym))) })) } key_methods <- list( Driver = list( "dbConnect" = NULL, "dbDataType" = NULL ), Connection = list( "dbDisconnect" = NULL, "dbGetInfo" = NULL, "dbSendQuery" = "character", "dbListFields" = "character", "dbListTables" = NULL, "dbReadTable" = "character", "dbWriteTable" = c("character", "data.frame"), "dbExistsTable" = "character", "dbRemoveTable" = "character", "dbBegin" = NULL, "dbCommit" = NULL, "dbRollback" = NULL, "dbIsValid" = NULL, "dbQuoteString" = "character", "dbQuoteIdentifier" = "character" ), Result = list( "dbIsValid" = NULL, "dbFetch" = NULL, "dbClearResult" = NULL, "dbColumnInfo" = NULL, "dbGetRowsAffected" = NULL, "dbGetRowCount" = NULL, "dbHasCompleted" = NULL, "dbGetStatement" = NULL, "dbBind" = NULL ) ) DBItest/R/utils.R0000644000176200001440000000440114157034343013237 0ustar liggesusers`%||%` <- function(a, b) if (is.null(a)) b else a get_pkg_path <- function(ctx) { pkg_name <- package_name(ctx) expect_type(pkg_name, "character") pkg_path <- find.package(pkg_name) pkg_path } utils::globalVariables("con") utils::globalVariables("con2") local_connection <- function(ctx, ..., .local_envir = parent.frame()) { con <- connect(ctx, ...) withr::local_db_connection(con, .local_envir = .local_envir) } local_closed_connection <- function(ctx, ...) { con <- connect(ctx, ...) dbDisconnect(con) con } local_invalid_connection <- function(ctx, ...) { con <- connect(ctx, ...) dbDisconnect(con) unserialize(serialize(con, NULL)) } # Calls `dbClearResult()` on `query` after exiting `frame`. local_result <- function(query, frame = rlang::caller_env()) { res <- query withr::defer( { dbClearResult(res) }, envir = frame ) res } # Calls `try_silent(dbRemoveTable())` after exiting `frame`. 
local_remove_test_table <- function(con, name, frame = rlang::caller_env()) { table_name <- dbQuoteIdentifier(con, name) withr::defer( try_silent( dbRemoveTable(con, table_name) ), envir = frame ) } get_penguins <- function(ctx) { datasets_penguins <- unrowname(palmerpenguins::penguins[c(1, 153, 277), ]) if (!isTRUE(ctx$tweaks$strict_identifier)) { names(datasets_penguins) <- gsub("_", ".", names(datasets_penguins), fixed = TRUE) } as.data.frame(datasets_penguins) } unrowname <- function(x) { rownames(x) <- NULL x } random_table_name <- function(n = 10) { # FIXME: Use parallel-safe sequence of numbers paste0("dbit", paste(sample(letters, n, replace = TRUE), collapse = "")) } compact <- function(x) { x[!vapply(x, is.null, logical(1L))] } expand_char <- function(...) { df <- expand.grid(..., stringsAsFactors = FALSE) do.call(paste0, df) } try_silent <- function(code) { tryCatch( code, error = function(e) NULL ) } check_df <- function(df) { expect_is(df, "data.frame") if (length(df) >= 1L) { lengths <- vapply(df, length, integer(1L), USE.NAMES = FALSE) expect_equal(diff(lengths), rep(0L, length(lengths) - 1L)) expect_equal(nrow(df), lengths[[1]]) } df_names <- names(df) expect_true(all(df_names != "")) expect_false(anyNA(df_names)) df } DBItest/R/zzz.R0000644000176200001440000000214214157034343012734 0ustar liggesusers.onLoad <- function(libname, pkgname) { if (rlang::is_installed("debugme")) { # Necessary to re-parse environment variable get(".onLoad", asNamespace("debugme"))(libname, pkgname) debugme::debugme() } debug_info() } activate_debugme <- function(bangs = 2) { old_debugme <- remove_from_logging(get_debugme()) old_debugme <- gsub("(.)$", "\\1,", old_debugme) my_debugme <- paste0(strrep("!", bangs), get_pkgname()) set_debugme(paste0(old_debugme, my_debugme)) } deactivate_debugme <- function() { new_debugme <- remove_from_logging(get_debugme()) set_debugme(new_debugme) } get_debugme <- function() { Sys.getenv("DEBUGME") } set_debugme <- function(debugme) { 
Sys.setenv("DEBUGME" = debugme) message("DEBUGME=", debugme) } remove_from_logging <- function(spec) { spec <- gsub(paste0("!*", get_pkgname(), ""), "", spec) spec <- gsub(",,+", ",", spec) spec } debug_info <- function(pkgname) { "!DEBUG `get_pkgname()` loaded" "!!DEBUG Two bangs" "!!!DEBUG Three bangs" "!!!!DEBUG Four bangs" } get_pkgname <- function() { environmentName(topenv(environment())) } DBItest/R/spec-connection.R0000644000176200001440000000017614157034343015173 0ustar liggesusers#' @format NULL spec_connection <- c( spec_connection_disconnect, spec_connection_data_type, spec_connection_get_info ) DBItest/R/spec-sql-list-fields.R0000644000176200001440000000636114157037563016061 0ustar liggesusers#' spec_sql_list_fields #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_list_fields <- list( list_fields_formals = function() { # expect_equal(names(formals(dbListFields)), c("conn", "name", "...")) }, #' @return #' `dbListFields()` list_fields = function(ctx, con, table_name) { penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) fields <- dbListFields(con, table_name) #' returns a character vector expect_is(fields, "character") #' that enumerates all fields #' in the table in the correct order. expect_identical(fields, names(penguins)) }, #' This also works for temporary tables if supported by the database. list_fields_temporary = function(ctx, con, table_name) { if (isTRUE(ctx$tweaks$temporary_tables) && isTRUE(ctx$tweaks$list_temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L, b = 2L), temporary = TRUE) fields <- dbListFields(con, table_name) expect_equal(fields, c("a", "b")) #' The returned names are suitable for quoting with `dbQuoteIdentifier()`. expect_equal(dbQuoteIdentifier(con, fields), dbQuoteIdentifier(con, c("a", "b"))) } }, #' #' @section Failure modes: #' If the table does not exist, an error is raised. 
list_fields_wrong_table = function(con) { name <- "missing" expect_false(dbExistsTable(con, name)) expect_error(dbListFields(con, name)) }, #' Invalid types for the `name` argument list_fields_invalid_type = function(con) { #' (e.g., `character` of length not equal to one, expect_error(dbListFields(con, character())) expect_error(dbListFields(con, letters)) #' or numeric) expect_error(dbListFields(con, 1)) #' lead to an error. }, #' An error is also raised when calling this method for a closed list_fields_closed_connection = function(ctx, closed_con) { expect_error(dbListFields(closed_con, "test")) }, #' or invalid connection. list_fields_invalid_connection = function(ctx, invalid_con) { expect_error(dbListFields(invalid_con, "test")) }, #' @section Specification: #' #' The `name` argument can be #' #' - a string #' - the return value of [dbQuoteIdentifier()] list_fields_quoted = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L, b = 2L)) expect_identical( dbListFields(con, dbQuoteIdentifier(con, table_name)), c("a", "b") ) }, #' - a value from the `table` column from the return value of #' [dbListObjects()] where `is_prefix` is `FALSE` list_fields_object = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L, b = 2L)) objects <- dbListObjects(con) expect_gt(nrow(objects), 0) expect_false(all(objects$is_prefix)) expect_identical( dbListFields(con, objects$table[[1]]), dbListFields(con, dbQuoteIdentifier(con, objects$table[[1]])) ) }, #' #' A column named `row_names` is treated like any other column. 
list_fields_row_names = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L, row_names = 2L)) expect_identical(dbListFields(con, table_name), c("a", "row_names")) }, # NULL ) DBItest/R/spec-result-fetch.R0000644000176200001440000001373614157037563015456 0ustar liggesusers#' spec_result_fetch #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_fetch <- list( fetch_formals = function() { # expect_equal(names(formals(dbFetch)), c("res", "n", "...")) }, #' @return #' `dbFetch()` always returns a [data.frame] #' with as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value fetch_atomic = function(con) { query <- trivial_query() res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_equal(rows, data.frame(a = 1.5)) }, #' or has one fetch_one_row = function(con) { query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, result) }, #' or zero rows. fetch_zero_rows = function(con) { query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(class(rows), "data.frame") }, #' #' @section Failure modes: #' An attempt to fetch from a closed result set raises an error. 
fetch_closed = function(con) { query <- trivial_query() res <- dbSendQuery(con, query) dbClearResult(res) expect_error(dbFetch(res)) }, #' If the `n` argument is not an atomic whole number #' greater or equal to -1 or Inf, an error is raised, fetch_n_bad = function(con) { query <- trivial_query() res <- local_result(dbSendQuery(con, query)) expect_error(dbFetch(res, -2)) expect_error(dbFetch(res, 1.5)) expect_error(dbFetch(res, integer())) expect_error(dbFetch(res, 1:3)) expect_error(dbFetch(res, NA_integer_)) }, #' but a subsequent call to `dbFetch()` with proper `n` argument succeeds. fetch_n_good_after_bad = function(con) { query <- trivial_query() res <- local_result(dbSendQuery(con, query)) expect_error(dbFetch(res, NA_integer_)) rows <- check_df(dbFetch(res)) expect_equal(rows, data.frame(a = 1.5)) }, #' Calling `dbFetch()` on a result set from a data manipulation query #' created by [dbSendStatement()] #' can be fetched and return an empty data frame, with a warning. fetch_no_return_value = function(con, table_name) { query <- paste0("CREATE TABLE ", table_name, " (a integer)") res <- local_result(dbSendStatement(con, query)) expect_warning(rows <- check_df(dbFetch(res))) expect_identical(rows, data.frame()) }, #' @section Specification: #' Fetching multi-row queries with one fetch_multi_row_single_column = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, result) }, #' or more columns by default returns the entire result. 
fetch_multi_row_multi_column = function(ctx, con) { query <- union( .ctx = ctx, paste("SELECT", 1:5 + 0.5, "AS a,", 4:0 + 0.5, "AS b"), .order_by = "a" ) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, data.frame(a = 1:5 + 0.5, b = 4:0 + 0.5)) }, #' Multi-row queries can also be fetched progressively fetch_n_progressive = function(ctx, con) { query <- trivial_query(25, .ctx = ctx, .order_by = "a") result <- trivial_df(25) res <- local_result(dbSendQuery(con, query)) #' by passing a whole number ([integer] rows <- check_df(dbFetch(res, 10L)) expect_identical(rows, unrowname(result[1:10, , drop = FALSE])) #' or [numeric]) rows <- check_df(dbFetch(res, 10)) expect_identical(rows, unrowname(result[11:20, , drop = FALSE])) #' as the `n` argument. rows <- check_df(dbFetch(res, n = 5)) expect_identical(rows, unrowname(result[21:25, , drop = FALSE])) }, #' A value of [Inf] for the `n` argument is supported #' and also returns the full result. fetch_n_multi_row_inf = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, n = Inf)) expect_identical(rows, result) }, #' If more rows than available are fetched, the result is returned in full #' without warning. fetch_n_more_rows = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, 5L)) expect_identical(rows, result) #' If fewer rows than requested are returned, further fetches will #' return a data frame with zero rows. rows <- check_df(dbFetch(res)) expect_identical(rows, result[0, , drop = FALSE]) }, #' If zero rows are fetched, the columns of the data frame are still fully #' typed. 
fetch_n_zero_rows = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(0) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, 0L)) expect_identical(rows, result) }, #' Fetching fewer rows than available is permitted, #' no warning is issued when clearing the result set. fetch_n_premature_close = function(ctx, con) { query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(2) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, 2L)) expect_identical(rows, result) }, #' #' A column named `row_names` is treated like any other column. fetch_row_names = function(con) { query <- trivial_query(column = "row_names") result <- trivial_df(column = "row_names") res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, result) expect_identical(.row_names_info(rows), -1L) }, # NULL ) DBItest/R/spec-sql-remove-table.R0000644000176200001440000001406314157037563016222 0ustar liggesusers#' spec_sql_remove_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_remove_table <- list( remove_table_formals = function() { # expect_equal(names(formals(dbRemoveTable)), c("conn", "name", "...")) }, #' @return #' `dbRemoveTable()` returns `TRUE`, invisibly. remove_table_return = function(ctx, con, table_name) { penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_invisible_true(dbRemoveTable(con, table_name)) }, #' #' @section Failure modes: #' If the table does not exist, an error is raised. remove_table_missing = function(con, table_name) { expect_error(dbRemoveTable(con, table_name)) }, #' An attempt to remove a view with this function may result in an error. 
#' #' #' An error is raised when calling this method for a closed remove_table_closed_connection = function(ctx, con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_closed_connection(ctx = ctx) expect_error(dbRemoveTable(con2, table_name)) }, #' or invalid connection. remove_table_invalid_connection = function(ctx, con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_invalid_connection(ctx) expect_error(dbRemoveTable(con2, table_name)) }, #' An error is also raised remove_table_error = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L)) #' if `name` cannot be processed with [dbQuoteIdentifier()] expect_error(dbRemoveTable(con, NA)) #' or if this results in a non-scalar. expect_error(dbRemoveTable(con, c(table_name, table_name))) }, #' @section Additional arguments: #' The following arguments are not part of the `dbRemoveTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `temporary` (default: `FALSE`) #' - `fail_if_missing` (default: `TRUE`) #' #' These arguments must be provided as named arguments. #' #' If `temporary` is `TRUE`, the call to `dbRemoveTable()` #' will consider only temporary tables. remove_table_temporary_arg = function(ctx, con, table_name) { #' Not all backends support this argument. if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } dbWriteTable(con, table_name, data.frame(a = 1.5)) expect_equal(dbReadTable(con, table_name), data.frame(a = 1.5)) dbCreateTable(con, table_name, data.frame(b = 2.5), temporary = TRUE) dbRemoveTable(con, table_name, temporary = TRUE) #' In particular, permanent tables of the same name are left untouched. expect_error(dbRemoveTable(con, table_name, temporary = TRUE)) expect_equal(dbReadTable(con, table_name), data.frame(a = 1.5)) }, #' #' If `fail_if_missing` is `FALSE`, the call to `dbRemoveTable()` #' succeeds if the table does not exist. 
remove_table_missing_succeed = function(con, table_name) { expect_error(dbRemoveTable(con, table_name, fail_if_missing = FALSE), NA) }, #' @section Specification: #' A table removed by `dbRemoveTable()` doesn't appear in the list of tables #' returned by [dbListTables()], #' and [dbExistsTable()] returns `FALSE`. remove_table_list = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L)) expect_true(table_name %in% dbListTables(con)) expect_true(dbExistsTable(con, table_name)) dbRemoveTable(con, table_name) expect_false(table_name %in% dbListTables(con)) expect_false(dbExistsTable(con, table_name)) }, #' The removal propagates immediately to other connections to the same database. remove_table_other_con = function(ctx, con, table_name) { con2 <- local_connection(ctx) dbWriteTable(con, table_name, data.frame(a = 1L)) expect_true(table_name %in% dbListTables(con2)) expect_true(dbExistsTable(con2, table_name)) dbRemoveTable(con, table_name) expect_false(table_name %in% dbListTables(con2)) expect_false(dbExistsTable(con2, table_name)) }, #' This function can also be used to remove a temporary table. 
remove_table_temporary = function(ctx, con, table_name) { if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) if (isTRUE(ctx$tweaks$list_temporary_tables)) { expect_true(table_name %in% dbListTables(con)) } expect_true(dbExistsTable(con, table_name)) dbRemoveTable(con, table_name) if (isTRUE(ctx$tweaks$list_temporary_tables)) { expect_false(table_name %in% dbListTables(con)) } expect_false(dbExistsTable(con, table_name)) }, #' #' The `name` argument is processed as follows, remove_table_name = function(ctx, con) { #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } test_in <- data.frame(a = 1L) for (table_name in table_names) { local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbRemoveTable()` will do the #' quoting, dbWriteTable(con, table_name, test_in) expect_true(dbRemoveTable(con, table_name)) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done remove_table_name_quoted = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } test_in <- data.frame(a = 1L) for (table_name in table_names) { local_remove_test_table(con, table_name) dbWriteTable(con, table_name, test_in) expect_true(dbRemoveTable(con, dbQuoteIdentifier(con, table_name))) } }, # NULL ) DBItest/R/spec-sql-quote-string.R0000644000176200001440000001157014157037563016301 0ustar liggesusers#' spec_sql_quote_string #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_quote_string <- list( 
quote_string_formals = function() { # expect_equal(names(formals(dbQuoteString)), c("conn", "x", "...")) }, #' @return quote_string_return = function(con) { #' `dbQuoteString()` returns an object that can be coerced to [character], simple <- "simple" simple_out <- dbQuoteString(con, simple) expect_error(as.character(simple_out), NA) expect_type(as.character(simple_out), "character") expect_equal(length(simple_out), 1L) }, # quote_string_vectorized = function(con) { #' of the same length as the input. letters_out <- dbQuoteString(con, letters) expect_equal(length(letters_out), length(letters)) #' For an empty character vector this function returns a length-0 object. empty_out <- dbQuoteString(con, character()) expect_equal(length(empty_out), 0L) }, # quote_string_double = function(con) { simple <- "simple" simple_out <- dbQuoteString(con, simple) letters_out <- dbQuoteString(con, letters) empty <- character() empty_out <- dbQuoteString(con, character()) #' #' When passing the returned object again to `dbQuoteString()` #' as `x` #' argument, it is returned unchanged. expect_identical(dbQuoteString(con, simple_out), simple_out) expect_identical(dbQuoteString(con, letters_out), letters_out) expect_identical(dbQuoteString(con, empty_out), empty_out) #' Passing objects of class [SQL] should also return them unchanged. expect_identical(dbQuoteString(con, SQL(simple)), SQL(simple)) expect_identical(dbQuoteString(con, SQL(letters)), SQL(letters)) expect_identical(dbQuoteString(con, SQL(empty)), SQL(empty)) #' (For backends it may be most convenient to return [SQL] objects #' to achieve this behavior, but this is not required.) 
}, #' @section Specification: quote_string_roundtrip = function(ctx, con) { do_test_string <- function(x) { #' The returned expression can be used in a `SELECT ...` query, query <- paste0("SELECT ", paste(dbQuoteString(con, x), collapse = ", ")) #' and for any scalar character `x` the value of #' \code{dbGetQuery(paste0("SELECT ", dbQuoteString(x)))[[1]]} #' must be identical to `x`, x_out <- check_df(dbGetQuery(con, query)) expect_equal(nrow(x_out), 1L) expect_identical(unlist(unname(x_out)), x) } test_chars <- c( #' even if `x` contains "", #' spaces, " ", #' tabs, "\t", #' quotes (single "'", #' or double), '"', #' backticks, "`", #' or newlines "\n" ) #' (in any combination) # length(test_chars) ** 3 test_strings_0 <- expand_char(test_chars, "a", test_chars, "b", test_chars) #' or is itself the result of a `dbQuoteString()` call coerced back to #' character (even repeatedly). test_strings_1 <- as.character(dbQuoteString(con, test_strings_0)) test_strings_2 <- as.character(dbQuoteString(con, test_strings_1)) test_strings <- c(test_strings_0, test_strings_1, test_strings_2) do_test_string(test_strings) }, # quote_string_na = function(ctx, con) { null <- dbQuoteString(con, NA_character_) quoted_null <- dbQuoteString(con, as.character(null)) na <- dbQuoteString(con, "NA") quoted_na <- dbQuoteString(con, as.character(na)) query <- paste0( "SELECT ", null, " AS null_return,", na, " AS na_return,", quoted_null, " AS quoted_null,", quoted_na, " AS quoted_na" ) #' If `x` is `NA`, the result must merely satisfy [is.na()]. rows <- check_df(dbGetQuery(con, query)) expect_true(is.na(rows$null_return)) #' The strings `"NA"` or `"NULL"` are not treated specially. 
    expect_identical(rows$na_return, "NA")
    expect_identical(rows$quoted_null, as.character(null))
    expect_identical(rows$quoted_na, as.character(na))
  },
  #
  quote_string_na_is_null = function(ctx, con) {
    # Spec: quoting NA must produce an unquoted SQL NULL literal.
    #'
    #' `NA` should be translated to an unquoted SQL `NULL`,
    null <- dbQuoteString(con, NA_character_)
    #' so that the query `SELECT * FROM (SELECT 1) a WHERE ... IS NULL`
    rows <- check_df(dbGetQuery(con, paste0("SELECT * FROM (SELECT 1) a WHERE ", null, " IS NULL")))
    #' returns one row.
    expect_equal(nrow(rows), 1L)
  },
  #'
  #' @section Failure modes:
  quote_string_error = function(ctx, con) {
    # Spec: dbQuoteString() accepts only character input for `x`; every
    # other type must raise an error.
    #'
    #' Passing a numeric,
    expect_error(dbQuoteString(con, c(1, 2, 3)))
    #' integer,
    expect_error(dbQuoteString(con, 1:3))
    #' logical,
    expect_error(dbQuoteString(con, c(TRUE, FALSE)))
    #' or raw vector,
    expect_error(dbQuoteString(con, as.raw(1:3)))
    #' or a list
    expect_error(dbQuoteString(con, as.list(1:3)))
    #' for the `x` argument raises an error.
  },
  #
  NULL
)
DBItest/R/spec-sql.R0000644000176200001440000000055314157034343013632 0ustar liggesusers#' @format NULL
# Combined SQL specification: concatenation of the individual SQL spec
# lists defined in the spec-sql-*.R files.
spec_sql <- c(
  spec_sql_quote_string,
  spec_sql_quote_literal,
  spec_sql_quote_identifier,
  spec_sql_unquote_identifier,
  spec_sql_read_table,
  spec_sql_create_table,
  spec_sql_append_table,
  spec_sql_write_table,
  spec_sql_list_tables,
  spec_sql_exists_table,
  spec_sql_remove_table,
  spec_sql_list_objects,
  spec_sql_list_fields
)
DBItest/R/expectations.R0000644000176200001440000000453214157034343014612 0ustar liggesusersexpect_arglist_is_empty <- function(object) {
  # Expectation helper: asserts that `object` (a function) takes no
  # arguments at all; returns the function invisibly.
  act <- quasi_label(rlang::enquo(object), arg = "object")
  act$formals <- formals(act$val)
  expect(
    is.null(act$formals),
    sprintf("%s has an empty argument list.", act$lab)
  )
  invisible(act$val)
}

expect_all_args_have_default_values <- function(object) {
  # Expectation helper: asserts that every argument of `object` other than
  # `...` has a default value; returns the function invisibly.
  act <- quasi_label(rlang::enquo(object), arg = "object")
  act$args <- formals(act$val)
  act$args <- act$args[names(act$args) != "..."]
  # Deparsed defaults: an empty string means "no default value".
  act$char_args <- vapply(act$args, as.character, character(1L))
  expect(
    all(nzchar(act$char_args, keepNA = FALSE)),
    sprintf("%s has arguments without default values", act$lab)
  )
  invisible(act$val)
}

# Returns a checker function that asserts an S4 method `method_name`
# exists for the class of its argument.
has_method <- function(method_name) {
  function(x) {
    my_class <- class(x)
    expect_true(
      length(findMethod(method_name, my_class)) > 0L,
      paste("object of class", my_class, "has no", method_name, "method")
    )
  }
}

# Asserts that `code` returns its value visibly; returns that value.
expect_visible <- function(code) {
  ret <- withVisible(code)
  expect_true(ret$visible)
  ret$value
}

# Asserts that `code` returns TRUE, invisibly; returns the value invisibly.
expect_invisible_true <- function(code) {
  ret <- withVisible(code)
  expect_true(ret$value)
  test_that("Visibility", {
    expect_false(ret$visible)
  })

  invisible(ret$value)
}

# Compares two data frames ignoring row order.  Factor columns in
# `expected` are compared as character, AsIs columns are unclassed.
# List columns (which cannot be passed to order()) are handled by
# ordering on the remaining columns, which must then identify rows
# uniquely.  `unrowname` is a package-internal helper (defined elsewhere
# in this package).
expect_equal_df <- function(actual, expected) {
  factor_cols <- vapply(expected, is.factor, logical(1L))
  expected[factor_cols] <- lapply(expected[factor_cols], as.character)
  asis_cols <- vapply(expected, inherits, "AsIs", FUN.VALUE = logical(1L))
  expected[asis_cols] <- lapply(expected[asis_cols], unclass)
  list_cols <- vapply(expected, is.list, logical(1L))
  if (!any(list_cols)) {
    order_actual <- do.call(order, actual)
    order_expected <- do.call(order, expected)
  } else {
    expect_false(all(list_cols))
    # Ordering on the non-list columns only works if they are duplicate-free.
    expect_equal(anyDuplicated(actual[!list_cols]), 0)
    expect_equal(anyDuplicated(expected[!list_cols]), 0)
    order_actual <- do.call(order, actual[!list_cols])
    order_expected <- do.call(order, expected[!list_cols])
  }
  has_rownames_actual <- is.character(attr(actual, "row.names"))
  has_rownames_expected <- is.character(attr(expected, "row.names"))
  expect_equal(has_rownames_actual, has_rownames_expected)
  if (has_rownames_actual) {
    expect_equal(sort(row.names(actual)), sort(row.names(expected)))
  }
  actual <- unrowname(actual[order_actual, ])
  expected <- unrowname(expected[order_expected, ])
  expect_identical(actual, expected)
}
DBItest/R/test-all.R0000644000176200001440000000560314157034343013631 0ustar liggesusers#' Run all tests
#'
#' `test_all()` calls all tests defined in this package (see the section
#' "Tests" below). This function supports running only one test by setting an
#' environment variable, e.g., set the `DBITEST_ONLY_RESULT` to a nonempty
#' value to run only `test_result()`.
#'
#' Internally `^` and `$` are used as prefix and suffix around the
#' regular expressions passed in the `skip` and `run_only` arguments.
#'
#' @section Tests:
#' This function runs the following tests, except the stress tests:
#'
#' @param skip `[character()]`\cr A vector of regular expressions to match
#'   against test names; skip test if matching any.
#'   The regular expressions are matched against the entire test name.
#' @param run_only `[character()]`\cr A vector of regular expressions to match
#'   against test names; run only these tests.
#'   The regular expressions are matched against the entire test name.
#' @param ctx `[DBItest_context]`\cr A test context as created by
#'   [make_context()].
#'
#' @export
test_all <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) {
  # Run every suite unless at least one DBITEST_ONLY_* environment
  # variable is set, in which case only the selected suites run.
  run_all <- length(grep("^DBITEST_ONLY_", names(Sys.getenv()))) == 0
  if (run_all || Sys.getenv("DBITEST_ONLY_GETTING_STARTED") != "") test_getting_started(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_DRIVER") != "") test_driver(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_CONNECTION") != "") test_connection(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_RESULT") != "") test_result(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_SQL") != "") test_sql(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_META") != "") test_meta(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_TRANSACTION") != "") test_transaction(skip = skip, run_only = run_only, ctx = ctx)
  if (run_all || Sys.getenv("DBITEST_ONLY_COMPLIANCE") != "") test_compliance(skip = skip, run_only = run_only, ctx = ctx)
  # stress tests are not tested by default (#92)
  invisible()
}

#' @rdname test_all
#' @description `test_some()` allows testing one or more tests.
#' @param test `[character]`\cr
#'   A character vector of regular expressions
#'   describing the tests to run.
#'   The regular expressions are matched against the entire test name.
#' @param dblog `[logical(1)]`\cr
#'   Set to `FALSE` to disable dblog integration.
#' @export
test_some <- function(test, ctx = get_default_context(), dblog = TRUE) {
  # NOTE(review): dblog and clipr are called via :: but do not appear in
  # the NAMESPACE importFrom() directives -- presumably Suggests-only
  # dependencies; confirm they are declared in DESCRIPTION.
  if (dblog) {
    # Wrap the connector so every DBI call is logged and collected.
    logger <- dblog::make_collect_logger(display = TRUE)
    ctx$cnr <- dblog::dblog_cnr(ctx$cnr, logger)
    ctx$drv <- ctx$cnr@.drv
  }
  test_all(run_only = test, ctx = ctx)
  if (dblog && is_interactive()) {
    clipr::write_clip(logger$retrieve())
    message("DBI calls written to clipboard.")
  }
}
DBItest/R/spec-result-create-table-with-data-type.R0000644000176200001440000000224714157037563021547 0ustar liggesusers#' spec_result_create_table_with_data_type
#' @family result specifications
#' @usage NULL
#' @format NULL
#' @keywords NULL
spec_result_create_table_with_data_type <- list(
  #' @section Specification:
  #' All data types returned by `dbDataType()` are usable in an SQL statement
  #' of the form
  data_type_create_table = function(ctx, con) {
    check_connection_data_type <- function(value) {
      table_name <- random_table_name()
      local_remove_test_table(con, table_name)
      #' `"CREATE TABLE test (a ...)"`.
query <- paste0("CREATE TABLE ", table_name, " (a ", dbDataType(con, value), ")") eval(bquote(dbExecute(con, .(query)))) } expect_conn_has_data_type <- function(value) { eval(bquote( expect_error(check_connection_data_type(.(value)), NA) )) } expect_conn_has_data_type(logical(1)) expect_conn_has_data_type(integer(1)) expect_conn_has_data_type(numeric(1)) expect_conn_has_data_type(character(1)) expect_conn_has_data_type(Sys.Date()) expect_conn_has_data_type(Sys.time()) if (!isTRUE(ctx$tweaks$omit_blob_tests)) { expect_conn_has_data_type(list(as.raw(0:10))) } }, # NULL ) DBItest/R/spec-driver-get-info.R0000644000176200001440000000144314157037563016042 0ustar liggesusers#' spec_driver_get_info #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @name spec_get_info spec_driver_get_info <- list( #' @return #' For objects of class [DBIDriver-class], `dbGetInfo()` get_info_driver = function(ctx) { info <- dbGetInfo(ctx$drv) #' returns a named list expect_type(info, "list") info_names <- names(info) #' that contains at least the following components: #' necessary_names <- c( #' - `driver.version`: the package version of the DBI backend, "driver.version", #' - `client.version`: the version of the DBMS client library. 
"client.version" ) for (name in necessary_names) { eval(bquote( expect_true(.(name) %in% info_names) )) } }, # NULL ) DBItest/R/spec-meta-bind.R0000644000176200001440000002432214157037563014702 0ustar liggesusers#' spec_meta_bind #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_bind <- list( bind_formals = function() { # expect_equal(names(formals(dbBind)), c("res", "params", "...")) }, #' @return bind_return_value = function(ctx, con) { extra <- new_bind_tester_extra( check_return_value = function(bind_res, res) { #' `dbBind()` returns the result set, expect_identical(res, bind_res$value) #' invisibly, expect_false(bind_res$visible) } ) #' for queries issued by [dbSendQuery()] test_select_bind(con, ctx, 1L, extra = extra) #' and also for data manipulation statements issued by #' [dbSendStatement()]. test_select_bind(con, ctx, 1L, extra = extra, query = FALSE) }, #' #' @section Failure modes: bind_empty = function(con) { #' Calling `dbBind()` for a query without parameters res <- local_result(dbSendQuery(con, trivial_query())) #' raises an error. 
expect_error(dbBind(res, list())) }, # bind_too_many = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' Binding too many if (is.null(names(bind_values))) { c(bind_values, bind_values[[1L]]) } else { c(bind_values, bogus = bind_values[[1L]]) } }, bind_error = function() ".*" ) test_select_bind(con, ctx, 1L, extra = extra) }, # bind_not_enough = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' or not enough values, bind_values[-1L] }, bind_error = function() ".*" ) test_select_bind(con, ctx, 1L, extra = extra) }, # bind_wrong_name = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' or parameters with wrong names stats::setNames(bind_values, paste0("bogus", names(bind_values))) }, # requires_names = function() TRUE, bind_error = function() ".*" ) test_select_bind(con, ctx, 1L, extra = extra) }, # bind_multi_row_unequal_length = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' or unequal length, bind_values[[2]] <- bind_values[[2]][-1] bind_values }, bind_error = function() ".*" ) #' also raises an error. 
test_select_bind( con, ctx, list(1:3, 2:4), extra = extra, query = FALSE ) }, #' If the placeholders in the query are named, bind_named_param_unnamed_placeholders = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' all parameter values must have names stats::setNames(bind_values, NULL) }, bind_error = function() ".*", # requires_names = function() TRUE ) test_select_bind(con, ctx, 1L, extra = extra) }, # bind_named_param_empty_placeholders = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' (which must not be empty names(bind_values)[[1]] <- "" }, bind_error = function() ".*", # requires_names = function() TRUE ) test_select_bind(con, ctx, list(1L, 2L), extra = extra) }, # bind_named_param_na_placeholders = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' or `NA`), names(bind_values)[[1]] <- NA }, bind_error = function() ".*", # requires_names = function() TRUE ) test_select_bind(con, ctx, list(1L, 2L), extra = extra) }, #' and vice versa, bind_unnamed_param_named_placeholders = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { stats::setNames(bind_values, letters[seq_along(bind_values)]) }, bind_error = function() ".*", # requires_names = function() FALSE ) #' otherwise an error is raised. test_select_bind(con, ctx, 1L, extra = extra) }, #' The behavior for mixing placeholders of different types #' (in particular mixing positional and named placeholders) #' is not specified. #' bind_premature_clear = function(ctx, con) { extra <- new_bind_tester_extra( #' Calling `dbBind()` on a result set already cleared by [dbClearResult()] is_premature_clear = function() TRUE ) #' also raises an error. 
expect_error( test_select_bind(con, ctx, 1L, extra = extra) ) }, #' @section Specification: #' The elements of the `params` argument do not need to be scalars, bind_multi_row = function(ctx, con) { #' vectors of arbitrary length test_select_bind(con, ctx, list(1:3)) }, # bind_multi_row_zero_length = function(ctx, con) { #' (including length 0) test_select_bind(con, ctx, list(integer(), integer())) #' are supported. # This behavior is tested as part of run_bind_tester$fun #' For queries, calling `dbFetch()` binding such parameters returns #' concatenated results, equivalent to binding and fetching for each set #' of values and connecting via [rbind()]. }, # bind_multi_row_statement = function(ctx, con) { # This behavior is tested as part of run_bind_tester$fun #' For data manipulation statements, `dbGetRowsAffected()` returns the #' total number of rows affected if binding non-scalar parameters. test_select_bind(con, ctx, list(1:3), query = FALSE) }, # bind_repeated = function(ctx, con) { extra <- new_bind_tester_extra( #' `dbBind()` also accepts repeated calls on the same result set is_repeated = function() TRUE ) #' for both queries test_select_bind(con, ctx, 1L, extra = extra) #' and data manipulation statements, test_select_bind(con, ctx, 1L, extra = extra, query = FALSE) }, # bind_repeated_untouched = function(ctx, con) { extra <- new_bind_tester_extra( #' even if no results are fetched between calls to `dbBind()`, is_repeated = function() TRUE, is_untouched = function() TRUE ) #' for both queries test_select_bind(con, ctx, 1L, extra = extra) #' and data manipulation statements. test_select_bind(con, ctx, 1L, extra = extra, query = FALSE) }, #' #' If the placeholders in the query are named, bind_named_param_shuffle = function(ctx, con) { extra <- new_bind_tester_extra( patch_bind_values = function(bind_values) { #' their order in the `params` argument is not important. 
bind_values[c(3, 1, 2, 4)] }, # requires_names = function() TRUE ) test_select_bind(con, ctx, c(1:3 + 0.5, NA), extra = extra) }, #' #' At least the following data types are accepted on input (including [NA]): #' - [integer] bind_integer = function(ctx, con) { test_select_bind(con, ctx, c(1:3, NA)) }, #' - [numeric] bind_numeric = function(ctx, con) { test_select_bind(con, ctx, c(1:3 + 0.5, NA)) }, #' - [logical] for Boolean values bind_logical = function(ctx, con) { test_select_bind(con, ctx, c(TRUE, FALSE, NA)) }, #' - [character] bind_character = function(ctx, con) { test_select_bind(con, ctx, c(texts, NA)) }, #' (also with special characters such as spaces, newlines, quotes, and backslashes) bind_character_escape = function(ctx, con) { test_select_bind(con, ctx, c(" ", "\n", "\r", "\b", "'", '"', "[", "]", "\\", NA)) }, #' - [factor] (bound as character, bind_factor = function(ctx, con) { #' with warning) expect_warning( test_select_bind( con, ctx, lapply(c(texts, NA_character_), factor) ) ) }, #' - [Date] bind_date = function(ctx, con) { if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } test_select_bind(con, ctx, c(Sys.Date() + 0:2, NA)) }, #' (also when stored internally as integer) bind_date_integer = function(ctx, con) { if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } test_select_bind(con, ctx, structure(c(18618:18620, NA), class = "Date")) }, #' - [POSIXct] timestamps bind_timestamp = function(ctx, con) { if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } data_in <- as.POSIXct(c(round(Sys.time()) + 0:2, NA)) test_select_bind(con, ctx, data_in) }, #' - [POSIXlt] timestamps bind_timestamp_lt = function(ctx, con) { if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } data_in <- lapply( round(Sys.time()) + c(0:2, NA), as.POSIXlt ) test_select_bind(con, ctx, data_in) }, #' - [difftime] values bind_time_seconds = function(ctx, con) { if (!isTRUE(ctx$tweaks$time_typed)) { 
skip("tweak: !time_typed") } data_in <- as.difftime(as.numeric(c(1:3, NA)), units = "secs") test_select_bind(con, ctx, data_in) }, #' (also with units other than seconds bind_time_hours = function(ctx, con) { if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } data_in <- as.difftime(as.numeric(c(1:3, NA)), units = "hours") test_select_bind(con, ctx, data_in) }, #' and with the value stored as integer) bind_time_minutes_integer = function(ctx, con) { if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } data_in <- as.difftime(c(1:3, NA), units = "mins") test_select_bind(con, ctx, data_in) }, #' - lists of [raw] for blobs (with `NULL` entries for SQL NULL values) bind_raw = function(ctx, con) { if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } test_select_bind( con, ctx, list(list(as.raw(1:10)), list(raw(3)), list(NULL)), cast_fun = ctx$tweaks$blob_cast ) }, #' - objects of type [blob::blob] bind_blob = function(ctx, con) { if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } test_select_bind( con, ctx, list(blob::blob(as.raw(1:10)), blob::blob(raw(3)), blob::blob(NULL)), cast_fun = ctx$tweaks$blob_cast ) }, # NULL ) DBItest/R/spec-result-execute.R0000644000176200001440000000555314157037563016025 0ustar liggesusers#' spec_result_execute #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_execute <- list( execute_formals = function() { # expect_equal(names(formals(dbExecute)), c("conn", "statement", "...")) }, #' @return #' `dbExecute()` always returns a execute_atomic = function(ctx, con, table_name) { query <- trivial_statement(ctx, table_name) ret <- dbExecute(con, query) #' scalar expect_equal(length(ret), 1) #' numeric expect_true(is.numeric(ret)) #' that specifies the number of rows affected #' by the statement. 
}, #' #' @section Failure modes: #' An error is raised when issuing a statement over a closed execute_closed_connection = function(ctx, closed_con) { table_name <- "dbit12" expect_error(dbExecute(closed_con, trivial_statement(ctx, table_name = table_name))) }, #' or invalid connection, execute_invalid_connection = function(ctx, invalid_con) { table_name <- "dbit13" expect_error(dbExecute(invalid_con, trivial_statement(ctx, table_name = table_name))) }, #' if the syntax of the statement is invalid, execute_syntax_error = function(con) { expect_error(dbExecute(con, "CREATTE")) }, #' or if the statement is not a non-`NA` string. execute_non_string = function(con) { expect_error(dbExecute(con, character())) expect_error(dbExecute(con, letters)) expect_error(dbExecute(con, NA_character_)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbExecute()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. #' @section Specification: #' #' The `param` argument allows passing query parameters, see [dbBind()] for details. 
  execute_params = function(ctx, con) {
    # Exercises dbExecute(..., params = ) with every placeholder style the
    # backend supports (get_placeholder_funs() is a package-internal
    # helper defined elsewhere; presumably driven by a tweak -- confirm).
    placeholder_funs <- get_placeholder_funs(ctx)
    for (placeholder_fun in placeholder_funs) {
      table_name <- random_table_name()
      local_remove_test_table(con, table_name)
      dbWriteTable(con, table_name, data.frame(a = as.numeric(1:3)))
      placeholder <- placeholder_fun(1)
      query <- paste0("DELETE FROM ", table_name, " WHERE a > ", placeholder)
      values <- 1.5
      params <- stats::setNames(list(values), names(placeholder))
      ret <- dbExecute(con, query, params = params)
      # Rows with a = 2 and a = 3 match "a > 1.5", so two rows are deleted.
      expect_equal(ret, 2, info = placeholder)
    }
  },
  #' @inheritSection spec_result_get_query Specification for the `immediate` argument
  execute_immediate = function(ctx, con, table_name) {
    res <- expect_visible(dbExecute(con, trivial_statement(ctx, table_name), immediate = TRUE))
    expect_true(is.numeric(res))
  },
  #
  NULL
)
DBItest/R/test-meta.R0000644000176200001440000000057614157034343014013 0ustar liggesusers#' @name test_all
#' @aliases NULL
#' @section Tests:
#' [test_meta()]:
#' Test metadata functions
NULL

#' Test metadata functions
#'
#' @inheritParams test_all
#' @include test-sql.R
#' @family tests
#' @export
test_meta <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) {
  # Runs the metadata portion of the DBI specification (spec_meta).
  test_suite <- "Metadata"
  run_tests(ctx, spec_meta, skip, run_only, test_suite)
}
DBItest/R/spec-transaction-with-transaction.R0000644000176200001440000000623514157037563020663 0ustar liggesusers#' spec_transaction_with_transaction
#' @family transaction specifications
#' @usage NULL
#' @format NULL
#' @keywords NULL
spec_transaction_with_transaction <- list(
  with_transaction_formals = function() {
    # expect_equal(names(formals(dbWithTransaction)), c("conn", "code", "..."))
  },
  #' @return
  #' `dbWithTransaction()` returns the value of the executed code.
  with_transaction_return_value = function(con) {
    name <- random_table_name()
    expect_identical(dbWithTransaction(con, name), name)
  },
  #'
  #' @section Failure modes:
  #' Failure to initiate the transaction
  #' (e.g., if the connection is closed
  with_transaction_error_closed = function(ctx, closed_con) {
    expect_error(dbWithTransaction(closed_con, NULL))
  },
  #' or invalid
  with_transaction_error_invalid = function(ctx, invalid_con) {
    expect_error(dbWithTransaction(invalid_con, NULL))
  },
  #' or if [dbBegin()] has been called already)
  with_transaction_error_nested = function(con) {
    dbBegin(con)
    #' gives an error.
    expect_error(dbWithTransaction(con, NULL))
    dbRollback(con)
  },
  #' @section Specification:
  #' `dbWithTransaction()` initiates a transaction with `dbBegin()`, executes
  #' the code given in the `code` argument, and commits the transaction with
  #' [dbCommit()].
  with_transaction_success = function(con, table_name) {
    dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE)
    dbWithTransaction(
      con,
      {
        dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE)
        # The write is already visible inside the transaction ...
        expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1))
      }
    )
    # ... and persists after the implicit commit.
    expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1))
  },
  #' If the code raises an error, the transaction is instead aborted with
  #' [dbRollback()], and the error is propagated.
  with_transaction_failure = function(con, table_name) {
    dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE)
    # A random name doubles as a unique error message to match on.
    name <- random_table_name()
    expect_error(
      dbWithTransaction(
        con,
        {
          dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE)
          stop(name)
        }
      ),
      name,
      fixed = TRUE
    )
    # The appended row was rolled back.
    expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0L))
  },
  #' If the code calls `dbBreak()`, execution of the code stops and the
  #' transaction is silently aborted.
with_transaction_break = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) expect_error( dbWithTransaction( con, { dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE) dbBreak() } ), NA ) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0L)) }, #' All side effects caused by the code with_transaction_side_effects = function(con) { expect_false(exists("a", inherits = FALSE)) #' (such as the creation of new variables) dbWithTransaction(con, a <- 42) #' propagate to the calling environment. expect_identical(get0("a", inherits = FALSE), 42) }, # NULL ) DBItest/R/spec-result-roundtrip.R0000644000176200001440000002751114157037563016407 0ustar liggesusers#' spec_result_roundtrip #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_roundtrip <- list( #' @section Specification: #' The column types of the returned data frame depend on the data returned: #' - [integer] (or coercible to an integer) for integer values between -2^31 and 2^31 - 1, data_integer = function(ctx, con) { #' with [NA] for SQL `NULL` values test_select_with_null(.ctx = ctx, con, 1L ~ equals_one, -100L ~ equals_minus_100) }, #' - [numeric] for numbers with a fractional component, data_numeric = function(ctx, con) { #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, 1.5, -100.5) }, #' - [logical] for Boolean values (some backends may return an integer); data_logical = function(ctx, con) { int_values <- 1:0 values <- ctx$tweaks$logical_return(as.logical(int_values)) sql_names <- paste0("CAST(", int_values, " AS ", dbDataType(con, logical()), ")") #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, .dots = setNames(values, sql_names)) }, #' - [character] for text, data_character = function(ctx, con) { values <- texts test_funs <- rep(list(has_utf8_or_ascii_encoding), length(values)) sql_names <- as.character(dbQuoteString(con, values)) #' with NA for 
SQL `NULL` values test_select_with_null(.ctx = ctx, con, .dots = setNames(values, sql_names)) test_select_with_null(.ctx = ctx, con, .dots = setNames(test_funs, sql_names)) }, #' - lists of [raw] for blobs data_raw = function(ctx, con) { if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } values <- list(is_raw_list) sql_names <- ctx$tweaks$blob_cast(quote_literal(con, list(raw(1)))) #' with [NULL] entries for SQL NULL values test_select_with_null(.ctx = ctx, con, .dots = setNames(values, sql_names)) }, #' - coercible using [as.Date()] for dates, data_date = function(ctx, con) { char_values <- paste0("2015-01-", sprintf("%.2d", 1:12)) values <- as_date_equals_to(as.Date(char_values)) sql_names <- ctx$tweaks$date_cast(char_values) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, .dots = setNames(values, sql_names)) }, #' (also applies to the return value of the SQL function `current_date`) data_date_current = function(ctx, con) { test_select_with_null( .ctx = ctx, con, "current_date" ~ is_roughly_current_date ) }, #' - coercible using [hms::as_hms()] for times, data_time = function(ctx, con) { char_values <- c("00:00:00", "12:34:56") time_values <- as_hms_equals_to(hms::as_hms(char_values)) sql_names <- ctx$tweaks$time_cast(char_values) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, .dots = setNames(time_values, sql_names)) }, #' (also applies to the return value of the SQL function `current_time`) data_time_current = function(ctx, con) { test_select_with_null( .ctx = ctx, con, "current_time" ~ coercible_to_time ) }, #' - coercible using [as.POSIXct()] for timestamps, data_timestamp = function(ctx, con) { char_values <- c("2015-10-11 00:00:00", "2015-10-11 12:34:56") time_values <- rep(list(coercible_to_timestamp), 2L) sql_names <- ctx$tweaks$timestamp_cast(char_values) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, .dots = setNames(time_values, sql_names)) }, #' (also 
applies to the return value of the SQL function `current_timestamp`) data_timestamp_current = function(ctx, con) { test_select_with_null( .ctx = ctx, con, "current_timestamp" ~ is_roughly_current_timestamp ) }, #' #' If dates and timestamps are supported by the backend, the following R types are #' used: #' - [Date] for dates data_date_typed = function(ctx, con) { if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } char_values <- paste0("2015-01-", sprintf("%.2d", 1:12)) values <- lapply(char_values, as_numeric_date) sql_names <- ctx$tweaks$date_cast(char_values) test_select_with_null(.ctx = ctx, con, .dots = setNames(values, sql_names)) }, #' (also applies to the return value of the SQL function `current_date`) data_date_current_typed = function(ctx, con) { if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } test_select_with_null( .ctx = ctx, con, "current_date" ~ is_roughly_current_date_typed ) }, #' - [POSIXct] for timestamps data_timestamp_typed = function(ctx, con) { if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } char_values <- c("2015-10-11 00:00:00", "2015-10-11 12:34:56") timestamp_values <- rep(list(is_timestamp), 2L) sql_names <- ctx$tweaks$timestamp_cast(char_values) test_select_with_null(.ctx = ctx, con, .dots = setNames(timestamp_values, sql_names)) }, #' (also applies to the return value of the SQL function `current_timestamp`) data_timestamp_current_typed = function(ctx, con) { if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } test_select_with_null( .ctx = ctx, con, "current_timestamp" ~ is_roughly_current_timestamp_typed ) }, #' #' R has no built-in type with lossless support for the full range of 64-bit #' or larger integers. 
If 64-bit integers are returned from a query, #' the following rules apply: #' - Values are returned in a container with support for the full range of #' valid 64-bit values (such as the `integer64` class of the \pkg{bit64} #' package) #' - Coercion to numeric always returns a number that is as close as possible #' to the true value data_64_bit_numeric = function(ctx, con) { char_values <- c("10000000000", "-10000000000") test_values <- as_numeric_identical_to(as.numeric(char_values)) test_select_with_null(.ctx = ctx, con, .dots = setNames(test_values, char_values)) }, #' - Loss of precision when converting to numeric gives a warning data_64_bit_numeric_warning = function(ctx, con) { char_values <- c(" 1234567890123456789", "-1234567890123456789") num_values <- as.numeric(char_values) test_values <- as_numeric_equals_to(num_values) suppressWarnings( expect_warning( test_select(.ctx = ctx, con, .dots = setNames(test_values, char_values), .add_null = "none") ) ) suppressWarnings( expect_warning( test_select(.ctx = ctx, con, .dots = setNames(test_values, char_values), .add_null = "above") ) ) suppressWarnings( expect_warning( test_select(.ctx = ctx, con, .dots = setNames(test_values, char_values), .add_null = "below") ) ) }, #' - Conversion to character always returns a lossless decimal representation #' of the data data_64_bit_lossless = function(ctx, con) { char_values <- c("1234567890123456789", "-1234567890123456789") test_values <- as_character_equals_to(char_values) test_select_with_null(.ctx = ctx, con, .dots = setNames(test_values, char_values)) }, # NULL ) test_select_with_null <- function(...) 
{ test_select(..., .add_null = "none") test_select(..., .add_null = "above") test_select(..., .add_null = "below") } test_select <- function(con, ..., .dots = NULL, .add_null = "none", .ctx, .envir = parent.frame()) { values <- c(list(...), .dots) value_is_formula <- vapply(values, is.call, logical(1L)) names(values)[value_is_formula] <- lapply(values[value_is_formula], "[[", 2L) values[value_is_formula] <- lapply( values[value_is_formula], function(x) { eval(x[[3]], envir = .envir) } ) if (is.null(names(values))) { sql_values <- lapply(values, as.character) } else { sql_values <- names(values) } if (isTRUE(.ctx$tweaks$current_needs_parens)) { sql_values <- gsub( "^(current_(?:date|time|timestamp))$", "\\1()", sql_values ) } sql_names <- letters[seq_along(sql_values)] query <- paste( "SELECT", paste(sql_values, "as", sql_names, collapse = ", ") ) if (.add_null != "none") { query_null <- paste( "SELECT", paste("NULL as", sql_names, collapse = ", ") ) query <- c(query, query_null) if (.add_null == "above") { query <- rev(query) } query <- paste0(query, ", ", 1:2, " as id") query <- union(.ctx = .ctx, query) } rows <- check_df(dbGetQuery(con, query)) if (.add_null != "none") { rows <- rows[order(rows$id), -(length(sql_names) + 1L), drop = FALSE] if (.add_null == "above") { rows <- rows[2:1, , drop = FALSE] } } expect_identical(names(rows), sql_names) for (i in seq_along(values)) { value_or_testfun <- values[[i]] if (is.function(value_or_testfun)) { eval(bquote(expect_true(value_or_testfun(rows[1L, .(i)])))) } else { eval(bquote(expect_identical(rows[1L, .(i)], .(value_or_testfun)))) } } if (.add_null != "none") { expect_equal(nrow(rows), 2L) if (is.list(rows[[1L]])) { expect_true(is.null(rows[2L, 1L][[1L]])) } else { expect_true(is.na(rows[2L, 1L])) } } else { expect_equal(nrow(rows), 1L) } } equals_one <- function(x) { identical(as.integer(x), 1L) && identical(as.numeric(x), 1) } equals_minus_100 <- function(x) { identical(as.integer(x), -100L) && 
identical(as.numeric(x), -100) }

# TRUE if every element of the character vector `x` is UTF-8- or ASCII-encoded.
all_have_utf8_or_ascii_encoding <- function(x) { all(vapply(x, has_utf8_or_ascii_encoding, logical(1L))) }

# TRUE if the scalar string `x` is marked as UTF-8, or is pure ASCII
# (encoding "unknown" that stays "unknown" after declaring UTF-8).
has_utf8_or_ascii_encoding <- function(x) {
  if (Encoding(x) == "UTF-8") {
    TRUE
  } else if (Encoding(x) == "unknown") {
    # Characters encoded as "unknown" must be ASCII only, and remain "unknown"
    # after attempting to assign an encoding. From ?Encoding :
    # > ASCII strings will never be marked with a declared encoding, since their
    # > representation is the same in all supported encodings.
    Encoding(x) <- "UTF-8"
    Encoding(x) == "unknown"
  } else {
    FALSE
  }
}

# TRUE if `x` is the expected blob representation: a list whose first
# element is a raw vector.
is_raw_list <- function(x) { is.list(x) && is.raw(x[[1L]]) }

# TRUE if `x` coerces to Date without error and the NA pattern is preserved.
# Relies on try_silent() (project helper) returning NULL on failure.
coercible_to_date <- function(x) { x_date <- try_silent(as.Date(x)) !is.null(x_date) && all(is.na(x) == is.na(x_date)) }

# For each element of `x`, build a predicate testing that a value,
# coerced with as.Date(), equals that element.
as_date_equals_to <- function(x) { lapply(x, function(xx) { function(value) as.Date(value) == xx }) }

# TRUE if `x` coerces to a date within one day of today (tolerates the
# server clock being slightly off from the client clock).
is_roughly_current_date <- function(x) { coercible_to_date(x) && (abs(Sys.Date() - as.Date(x)) <= 1) }

# TRUE if `x` coerces to hms without error and the NA pattern is preserved.
coercible_to_time <- function(x) { x_hms <- try_silent(hms::as_hms(x)) !is.null(x_hms) && all(is.na(x) == is.na(x_hms)) }

# Predicate factory: value, coerced with hms::as_hms(), equals the element.
as_hms_equals_to <- function(x) { lapply(x, function(xx) { function(value) hms::as_hms(value) == xx }) }

# TRUE if `x` coerces to POSIXct without error and the NA pattern is preserved.
coercible_to_timestamp <- function(x) { x_timestamp <- try_silent(as.POSIXct(x)) !is.null(x_timestamp) && all(is.na(x) == is.na(x_timestamp)) }

# Predicate factory: value, coerced with as.POSIXct(), equals the element.
as_timestamp_equals_to <- function(x) { lapply(x, function(xx) { function(value) as.POSIXct(value) == xx }) }

# NOTE(review): despite the name, this compares with `==` (exact numeric
# equality after coercion), not identical().
as_numeric_identical_to <- function(x) { lapply(x, function(xx) { function(value) as.numeric(value) == xx }) }

# Predicate factory: numeric equality with tolerance, via all.equal().
as_numeric_equals_to <- function(x) { lapply(x, function(xx) { function(value) isTRUE(all.equal(as.numeric(value), xx)) }) }

# Predicate factory: character equality after as.character() coercion.
as_character_equals_to <- function(x) { lapply(x, function(xx) { function(value) as.character(value) == xx }) }

# TRUE if `x` coerces to a timestamp no later than a small tolerance
# (hms::hms(2) — presumably two seconds; confirm against hms::hms() signature)
# behind the current time; the value is interpreted as UTC.
is_roughly_current_timestamp <- function(x) { coercible_to_timestamp(x) && (Sys.time() - as.POSIXct(x, tz = "UTC") <= hms::hms(2)) }

# TRUE if `x` already carries the Date class (typed date support).
is_date <- function(x) { inherits(x, "Date") }
is_roughly_current_date_typed <- function(x) { is_date(x) && (abs(Sys.Date() - x) <= 1) } is_timestamp <- function(x) { inherits(x, "POSIXct") } is_roughly_current_timestamp_typed <- function(x) { is_timestamp(x) && (Sys.time() - x <= hms::hms(2)) } as_numeric_date <- function(d) { d <- as.Date(d) structure(as.numeric(unclass(d)), class = class(d)) } quote_literal <- function(con, x) { DBI::dbQuoteLiteral(con, x) } DBItest/R/spec-meta-bind-runner.R0000644000176200001440000000737014157037563016215 0ustar liggesusersrun_bind_tester <- list() #' spec_meta_bind #' @family meta specifications #' @name spec_meta_bind #' @usage NULL #' @format NULL #' @keywords NULL #' @section Specification: #' \pkg{DBI} clients execute parametrized statements as follows: #' run_bind_tester$fun <- function() { if ((extra_obj$requires_names() %in% TRUE) && is.null(names(placeholder_fun(1)))) { # test only valid for named placeholders return() } if ((extra_obj$requires_names() %in% FALSE) && !is.null(names(placeholder_fun(1)))) { # test only valid for unnamed placeholders return() } #' 1. Call [dbSendQuery()] or [dbSendStatement()] with a query or statement #' that contains placeholders, #' store the returned [DBIResult-class] object in a variable. #' Mixing placeholders (in particular, named and unnamed ones) is not #' recommended. if (is_query()) { res <- send_query() } else { res <- send_statement() } #' It is good practice to register a call to [dbClearResult()] via #' [on.exit()] right after calling `dbSendQuery()` or `dbSendStatement()` #' (see the last enumeration item). 
# Either clear the result prematurely (to test that dbBind() on a cleared
# result errors) or register a deferred check that clearing succeeds.
if (extra_obj$is_premature_clear()) {
  dbClearResult(res)
} else {
  on.exit(expect_error(dbClearResult(res), NA))
}
#' Until `dbBind()` has been called, the returned result set object has the
#' following behavior:
#' - [dbFetch()] raises an error (for `dbSendQuery()`)
if (is_query()) expect_error(dbFetch(res))
#' - [dbGetRowCount()] returns zero (for `dbSendQuery()`)
if (is_query()) expect_equal(dbGetRowCount(res), 0)
#' - [dbGetRowsAffected()] returns an integer `NA` (for `dbSendStatement()`)
if (!is_query()) expect_identical(dbGetRowsAffected(res), NA_integer_)
#' - [dbIsValid()] returns `TRUE`
expect_true(dbIsValid(res))
#' - [dbHasCompleted()] returns `FALSE`
expect_false(dbHasCompleted(res))
#' 1. Construct a list with parameters
#' that specify actual values for the placeholders.
bind_values <- values
#' The list must be named or unnamed,
#' depending on the kind of placeholders used.
#' Named values are matched to named parameters, unnamed values
#' are matched by position in the list of parameters.
# Name the bind values only when the placeholder syntax itself is named.
if (!is.null(names(placeholder_fun(1)))) {
  names(bind_values) <- names(placeholder_fun(length(bind_values)))
}
#' All elements in this list must have the same lengths and contain values
#' supported by the backend; a [data.frame] is internally stored as such
#' a list.
#' The parameter list is passed to a call to `dbBind()` on the `DBIResult`
#' object.
bind(res, bind_values)
# If the extra object declared that dbBind() must error, stop here.
if (!is.na(extra_obj$bind_error())) {
  return()
}
# Safety net: returning early if dbBind() should have thrown an error but
# didn't
if (!identical(bind_values, extra_obj$patch_bind_values(bind_values))) {
  return()
}
if (extra_obj$is_premature_clear()) {
  return()
}
#' 1. Retrieve the data or the number of affected rows from the `DBIResult` object.
retrieve <- function() {
  #' - For queries issued by `dbSendQuery()`,
  #' call [dbFetch()].
  if (is_query()) {
    rows <- check_df(dbFetch(res))
    compare(rows)
  } else {
    #' - For statements issued by `dbSendStatements()`,
    #' call [dbGetRowsAffected()].
#' (Execution begins immediately after the `dbBind()` call, #' the statement is processed entirely before the function returns.) rows_affected <- dbGetRowsAffected(res) compare_affected(rows_affected, values) } } if (!extra_obj$is_untouched()) retrieve() #' 1. Repeat 2. and 3. as necessary. if (extra_obj$is_repeated()) { bind(res, bind_values) retrieve() } #' 1. Close the result set via [dbClearResult()]. } DBItest/R/spec-meta-get-rows-affected.R0000644000176200001440000000352014157037563017271 0ustar liggesusers#' spec_meta_get_rows_affected #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_get_rows_affected <- list( get_rows_affected_formals = function() { # expect_equal(names(formals(dbGetRowsAffected)), c("res", "...")) }, #' @return #' `dbGetRowsAffected()` returns a scalar number (integer or numeric), #' the number of rows affected by a data manipulation statement rows_affected_statement = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1:10)) query <- paste0( "DELETE FROM ", dbQuoteIdentifier(con, table_name), " ", "WHERE a < 6" ) #' issued with [dbSendStatement()]. res <- local_result(dbSendStatement(con, query)) rc <- dbGetRowsAffected(res) #' The value is available directly after the call expect_equal(rc, 5L) expect_warning(check_df(dbFetch(res))) rc <- dbGetRowsAffected(res) #' and does not change after calling [dbFetch()]. expect_equal(rc, 5L) }, # rows_affected_query = function(con) { query <- trivial_query() #' For queries issued with [dbSendQuery()], res <- local_result(dbSendQuery(con, query)) rc <- dbGetRowsAffected(res) #' zero is returned before expect_equal(rc, 0L) check_df(dbFetch(res)) rc <- dbGetRowsAffected(res) #' and after the call to `dbFetch()`. 
expect_equal(rc, 0L) }, #' #' @section Failure modes: get_rows_affected_error = function(con, table_name) { query <- paste0( "CREATE TABLE ", dbQuoteIdentifier(con, table_name), " (a integer)" ) res <- dbSendStatement(con, query) dbClearResult(res) #' Attempting to get the rows affected for a result set cleared with #' [dbClearResult()] gives an error. expect_error(dbGetRowsAffected(res)) }, # NULL ) DBItest/R/spec-sql-append-table.R0000644000176200001440000004512014157037563016172 0ustar liggesusers#' spec_sql_append_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_append_table <- list( append_table_formals = function() { # expect_equal(names(formals(dbAppendTable)), c("conn", "name", "value", "...", "row.names")) }, #' @return #' `dbAppendTable()` returns a append_table_return = function(con, table_name) { test_in <- trivial_df() dbCreateTable(con, table_name, test_in) ret <- dbAppendTable(con, table_name, test_in) #' scalar expect_equal(length(ret), 1) #' numeric. expect_true(is.numeric(ret)) }, #' #' @section Failure modes: #' If the table does not exist, append_table_missing = function(con, table_name) { expect_false(dbExistsTable(con, table_name)) test_in <- trivial_df() expect_error(dbAppendTable(con, table_name, data.frame(a = 2L))) }, #' or the new data in `values` is not a data frame or has different column names, #' an error is raised; the remote table remains unchanged. 
append_table_invalid_value = function(con, table_name) {
    # Appending a non-data-frame (unclassed list) must fail and leave the
    # freshly created, still-empty table unchanged.
    test_in <- trivial_df()
    dbCreateTable(con, table_name, test_in)
    expect_error(dbAppendTable(con, table_name, unclass(test_in)))
    test_out <- check_df(dbReadTable(con, table_name))
    expect_equal_df(test_out, test_in[0, , drop = FALSE])
  },
  append_table_append_incompatible = function(con, table_name) {
    # Appending a data frame with mismatched column names must fail and
    # leave the previously appended rows intact.
    test_in <- trivial_df()
    dbCreateTable(con, table_name, test_in)
    dbAppendTable(con, table_name, test_in)
    expect_error(dbAppendTable(con, table_name, data.frame(b = 2L)))
    test_out <- check_df(dbReadTable(con, table_name))
    expect_equal_df(test_out, test_in)
  },
  #'
  #' An error is raised when calling this method for a closed
  append_table_closed_connection = function(ctx, closed_con) {
    expect_error(dbAppendTable(closed_con, "test", data.frame(a = 1)))
  },
  #' or invalid connection.
  append_table_invalid_connection = function(ctx, invalid_con) {
    expect_error(dbAppendTable(invalid_con, "test", data.frame(a = 1)))
  },
  #' An error is also raised
  append_table_error = function(con, table_name) {
    test_in <- data.frame(a = 1L)
    #' if `name` cannot be processed with [dbQuoteIdentifier()]
    expect_error(dbAppendTable(con, NA, test_in))
    #' or if this results in a non-scalar.
    expect_error(dbAppendTable(con, c("test", "test"), test_in))
    #' Invalid values for the `row.names` argument
    #' (non-scalars,
    expect_error(dbAppendTable(con, "test", test_in, row.names = letters))
    #' unsupported data types,
    expect_error(dbAppendTable(con, "test", test_in, row.names = list(1L)))
    #' `NA`)
    expect_error(dbAppendTable(con, "test", test_in, row.names = NA))
    #' also raise an error.
  },
  #'
  #' @section Specification:
  #' SQL keywords can be used freely in table names, column names, and data.
append_roundtrip_keywords = function(con) { tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in, name = "exists") }, #' Quotes, commas, spaces, and other special characters such as newlines and tabs, #' can also be used in the data, append_roundtrip_quotes = function(ctx, con, table_name) { tbl_in <- data.frame( as.character(dbQuoteString(con, "")), as.character(dbQuoteIdentifier(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb", stringsAsFactors = FALSE ) names(tbl_in) <- letters[seq_along(tbl_in)] test_table_roundtrip(con, tbl_in, use_append = TRUE) }, #' and, if the database supports non-syntactic identifiers, #' also for table names append_roundtrip_quotes_table_names = function(ctx, con) { if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb" ) tbl_in <- trivial_df() for (table_name in table_names) { test_table_roundtrip_one(con, tbl_in, use_append = TRUE, .add_na = FALSE) } }, #' and column names. 
append_roundtrip_quotes_column_names = function(ctx, con) {
    # Column names containing quoting characters, whitespace, and
    # backslash escapes must survive a write/read roundtrip
    # (only when the backend supports non-syntactic identifiers).
    if (isTRUE(ctx$tweaks$strict_identifier)) {
      skip("tweak: strict_identifier")
    }
    column_names <- c(
      as.character(dbQuoteIdentifier(con, "")),
      as.character(dbQuoteString(con, "")),
      "with space",
      "a,b",
      "a\nb",
      "a\tb",
      "a\rb",
      "a\bb",
      "a\\nb",
      "a\\tb",
      "a\\rb",
      "a\\bb",
      "a\\zb"
    )
    tbl_in <- trivial_df(length(column_names), column_names)
    test_table_roundtrip_one(con, tbl_in, use_append = TRUE, .add_na = FALSE)
  },
  #'
  #' The following data types must be supported at least,
  #' and be read identically with [dbReadTable()]:
  #' - integer
  append_roundtrip_integer = function(con) {
    tbl_in <- data.frame(a = c(1:5))
    test_table_roundtrip(use_append = TRUE, con, tbl_in)
  },
  #' - numeric
  append_roundtrip_numeric = function(con) {
    tbl_in <- data.frame(a = c(seq(1, 3, by = 0.5)))
    test_table_roundtrip(use_append = TRUE, con, tbl_in)
    #' (the behavior for `Inf` and `NaN` is not specified)
  },
  #' - logical
  append_roundtrip_logical = function(ctx, con) {
    # The expected output goes through the backend's logical_return tweak,
    # since some backends return logicals as integers.
    tbl_in <- data.frame(a = c(TRUE, FALSE, NA))
    tbl_exp <- tbl_in
    tbl_exp$a <- ctx$tweaks$logical_return(tbl_exp$a)
    test_table_roundtrip(use_append = TRUE, con, tbl_in, tbl_exp)
  },
  #' - `NA` as NULL
  append_roundtrip_null = function(con) {
    tbl_in <- data.frame(a = NA)
    test_table_roundtrip(
      use_append = TRUE,
      con, tbl_in,
      transform = function(tbl_out) {
        tbl_out$a <- as.logical(tbl_out$a) # Plain NA is of type logical
        tbl_out
      }
    )
  },
  #' - 64-bit values (using `"bigint"` as field type); the result can be
  append_roundtrip_64_bit_numeric = function(ctx, con) {
    tbl_in <- data.frame(a = c(-1e14, 1e15))
    test_table_roundtrip(
      use_append = TRUE,
      con, tbl_in,
      transform = function(tbl_out) {
        #' - converted to a numeric, which may lose precision,
        tbl_out$a <- as.numeric(tbl_out$a)
        tbl_out
      },
      field.types = c(a = "BIGINT")
    )
  },
  #
  append_roundtrip_64_bit_character = function(ctx, con) {
    # Expected value: the full decimal rendering of the 64-bit input.
    tbl_in <- data.frame(a = c(-1e14, 1e15))
    tbl_exp <- tbl_in
    tbl_exp$a <- format(tbl_exp$a, scientific = FALSE)
test_table_roundtrip( use_append = TRUE, con, tbl_in, tbl_exp, transform = function(tbl_out) { #' - converted a character vector, which gives the full decimal #' representation tbl_out$a <- as.character(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # append_roundtrip_64_bit_roundtrip = function(con, table_name) { tbl_in <- data.frame(a = c(-1e14, 1e15)) dbWriteTable(con, table_name, tbl_in, field.types = c(a = "BIGINT")) tbl_out <- dbReadTable(con, table_name) #' - written to another table and read again unchanged test_table_roundtrip(use_append = TRUE, con, tbl_out, tbl_expected = tbl_out) }, #' - character (in both UTF-8 append_roundtrip_character = function(con) { tbl_in <- data.frame( id = seq_along(texts), a = c(texts), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, #' and native encodings), append_roundtrip_character_native = function(con) { tbl_in <- data.frame( a = c(enc2native(texts)), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, #' supporting empty strings append_roundtrip_character_empty = function(con) { tbl_in <- data.frame( a = c("", "a"), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, #' (before and after non-empty strings) append_roundtrip_character_empty_after = function(con) { tbl_in <- data.frame( a = c("a", ""), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, #' - factor (returned as character, append_roundtrip_factor = function(con) { tbl_in <- data.frame( a = factor(c(texts)) ) tbl_exp <- tbl_in tbl_exp$a <- as.character(tbl_exp$a) #' with a warning) suppressWarnings( expect_warning( test_table_roundtrip(use_append = TRUE, con, tbl_in, tbl_exp) ) ) }, #' - list of raw append_roundtrip_raw = function(ctx, con) { #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = I(list(as.raw(0:10)))) tbl_exp <- 
tbl_in tbl_exp$a <- blob::as_blob(unclass(tbl_in$a)) test_table_roundtrip( use_append = TRUE, con, tbl_in, tbl_exp, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, #' - objects of type [blob::blob] append_roundtrip_blob = function(ctx, con) { #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = blob::blob(as.raw(0:10))) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, #' - date append_roundtrip_date = function(ctx, con) { #' (if supported by the database; if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } #' returned as `Date`) tbl_in <- data.frame(a = as_numeric_date(c(Sys.Date() + 1:5))) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { expect_is(unclass(tbl_out$a), "numeric") tbl_out } ) }, #' also for dates prior to 1970 or 1900 or after 2038 append_roundtrip_date_extended = function(ctx, con) { if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } tbl_in <- data.frame(a = as_numeric_date(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" ))) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, #' - time append_roundtrip_time = function(ctx, con) { #' (if supported by the database; if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } tbl_in <- data.frame(a = hms::hms(minutes = 1:5)) tbl_in$b <- .difftime(as.numeric(tbl_in$a) / 60, "mins") tbl_exp <- tbl_in tbl_exp$a <- hms::as_hms(tbl_exp$a) tbl_exp$b <- hms::as_hms(tbl_exp$b) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' returned as objects that inherit from `difftime`) expect_s3_class(tbl_out$a, "difftime") 
expect_s3_class(tbl_out$b, "difftime") tbl_out$a <- hms::as_hms(tbl_out$a) tbl_out$b <- hms::as_hms(tbl_out$b) tbl_out } ) }, #' - timestamp append_roundtrip_timestamp = function(ctx, con) { #' (if supported by the database; if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } #' returned as `POSIXct` local <- round(Sys.time()) + c( 1, 60, 3600, 86400, 86400 * 90, 86400 * 180, 86400 * 270, 1e9, 5e9 ) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone), test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(out) { dates <- vapply(out, inherits, "POSIXt", FUN.VALUE = logical(1L)) tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' also for timestamps prior to 1970 or 1900 or after 2038 append_roundtrip_timestamp_extended = function(ctx, con) { if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } local <- as.POSIXct(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" )) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(out) { dates <- vapply(out, inherits, "POSIXt", FUN.VALUE = logical(1L)) tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, 
out[dates], tz[dates]) out } ) }, #' #' Mixing column types in the same table is supported. append_roundtrip_mixed = function(con) { data <- list("a", 1L, 1.5) data <- lapply(data, c, NA) expanded <- expand.grid(a = data, b = data, c = data) tbl_in_list <- lapply( seq_len(nrow(expanded)), function(i) { as.data.frame(lapply(expanded[i, ], unlist, recursive = FALSE)) } ) lapply(tbl_in_list, test_table_roundtrip, con = con) }, #' @section Specification: #' The `name` argument is processed as follows, append_table_name = function(ctx, con) { #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbAppendTable()` will do the quoting, dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done append_table_name_quoted = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) dbCreateTable(con, dbQuoteIdentifier(con, table_name), test_in) dbAppendTable(con, dbQuoteIdentifier(con, table_name), test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) } }, #' #' #' The `row.names` argument must be 
`NULL`, the default value. append_table_row_names_false = function(con, table_name) { mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) dbAppendTable(con, table_name, mtcars_in) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) }, #' Row names are ignored. append_table_row_names_ignore = function(con, table_name) { mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) dbAppendTable(con, table_name, mtcars_in, row.names = NULL) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) }, # append_table_row_names_non_null = function(con, table_name) { #' #' @section Failure modes: #' Passing a `value` argument different to `NULL` to the `row.names` argument mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) #' (in particular `TRUE`, expect_error(dbAppendTable(con, table_name, mtcars_in, row.names = TRUE)) #' `NA`, expect_error(dbAppendTable(con, table_name, mtcars_in, row.names = NA)) #' and a string) expect_error(dbAppendTable(con, table_name, mtcars_in, row.names = "make_model")) #' raises an error. }, #' #' @section Specification: #' The `value` argument must be a data frame append_table_value_df = function(con, table_name) { test_in <- trivial_df() dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' with a subset of the columns of the existing table. 
append_table_value_subset = function(ctx, con, table_name) { test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in[2]) test_out <- check_df(dbReadTable(con, table_name)) test_in[c(1, 3)] <- NA_real_ expect_equal_df(test_out, test_in) }, #' The order of the columns does not matter. append_table_value_shuffle = function(ctx, con, table_name) { test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in[c(2, 3, 1)]) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, # append_table_value_shuffle_subset = function(ctx, con, table_name) { test_in <- trivial_df(4, letters[1:4]) dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in[c(4, 1, 3)]) test_out <- check_df(dbReadTable(con, table_name)) test_in[2] <- NA_real_ expect_equal_df(test_out, test_in) }, # NULL ) DBItest/R/spec-meta-bind-.R0000644000176200001440000001357614157035042014756 0ustar liggesusers# Helpers ----------------------------------------------------------------- test_select_bind <- function(con, ctx, ...) { lapply( get_placeholder_funs(ctx), test_select_bind_one, con = con, is_null_check = ctx$tweaks$is_null_check, ... 
) } get_placeholder_funs <- function(ctx) { placeholder_fun <- ctx$tweaks$placeholder_pattern if (is.character(placeholder_fun)) { placeholder_fun <- lapply(placeholder_fun, make_placeholder_fun) } else if (is.function(placeholder_fun)) { placeholder_fun <- list(placeholder_fun) } if (length(placeholder_fun) == 0) { skip("Use the placeholder_pattern tweak, or skip all 'bind_.*' tests") } placeholder_fun } test_select_bind_one <- function(con, placeholder_fun, is_null_check, values, query = TRUE, extra = "none", cast_fun = identity) { bind_tester <- BindTester$new(con) bind_tester$placeholder_fun <- placeholder_fun bind_tester$is_null_check <- is_null_check bind_tester$cast_fun <- cast_fun bind_tester$values <- values bind_tester$query <- query bind_tester$extra_obj <- new_extra_imp(extra) bind_tester$run() } new_extra_imp <- function(extra) { if (is.environment(extra)) { extra$new() } else if (length(extra) == 0) { new_extra_imp_one("none") } else if (length(extra) == 1) { new_extra_imp_one(extra) } else { stop("need BindTesterExtraMulti") # BindTesterExtraMulti$new(lapply(extra, new_extra_imp_one)) } } new_extra_imp_one <- function(extra) { extra_imp <- switch(extra, none = BindTesterExtra, stop("Unknown extra: ", extra, call. 
= FALSE) ) extra_imp$new() } # BindTester -------------------------------------------------------------- BindTester <- R6::R6Class( "BindTester", portable = FALSE, # public = list( initialize = function(con) { self$con <- con }, run = run_bind_tester$fun, # con = NULL, placeholder_fun = NULL, is_null_check = NULL, cast_fun = NULL, values = NULL, query = TRUE, extra_obj = NULL ), # private = list( is_query = function() { query }, # send_query = function() { ret_values <- trivial_values(2) placeholder <- placeholder_fun(length(values)) is_na <- vapply(values, is_na_or_null, logical(1)) placeholder_values <- vapply(values, function(x) quote_literal(con, x[1]), character(1)) result_names <- letters[seq_along(values)] query <- paste0( "SELECT ", paste0( "CASE WHEN ", ifelse( is_na, paste0("(", is_null_check(cast_fun(placeholder)), ")"), paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") ), " THEN ", ret_values[[1]], " ELSE ", ret_values[[2]], " END", " AS ", result_names, collapse = ", " ) ) dbSendQuery(con, query) }, # send_statement = function() { data <- data.frame(a = rep(1:5, 1:5)) data$b <- seq_along(data$a) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) value_names <- letters[seq_along(values)] placeholder <- placeholder_fun(length(values)) statement <- paste0( "UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ", paste(value_names, " = ", placeholder, collapse = " AND ") ) dbSendStatement(con, statement) }, # bind = function(res, bind_values) { bind_values <- extra_obj$patch_bind_values(bind_values) bind_error <- extra_obj$bind_error() expect_error(bind_res <- withVisible(dbBind(res, bind_values)), bind_error) if (is.na(bind_error) && exists("bind_res")) { extra_obj$check_return_value(bind_res, res) } invisible() }, # compare = function(rows) { expect_equal(nrow(rows), length(values[[1]])) if (nrow(rows) > 0) { result_names <- letters[seq_along(values)] expected <- c(trivial_values(1), 
# make_placeholder_fun ----------------------------------------------------

#' Create a function that creates n placeholders
#'
#' For internal use by the `placeholder_format` tweak.
#'
#' @param pattern `[character(1)]`\cr Any character, optionally followed by
#'   `1` or `name`. Examples: `"?"`, `"$1"`, `":name"`
#'
#' @return `[function(n)]`\cr A function with one argument `n` that
#'   returns a vector of length `n` with placeholders of the specified format.
#'
#' @keywords internal
#' @examples
#' body(DBItest:::make_placeholder_fun("?"))
#' DBItest:::make_placeholder_fun("?")(2)
#' DBItest:::make_placeholder_fun("$1")(3)
#' DBItest:::make_placeholder_fun(":name")(5)
make_placeholder_fun <- function(pattern) {
  # Split the pattern into its leading character and the optional suffix
  # that selects the placeholder flavor.
  pattern_rx <- "^(.)(.*)$"
  lead_char <- gsub(pattern_rx, "\\1", pattern)
  suffix <- gsub(pattern_rx, "\\2", pattern)

  if (lead_char == "") {
    stop("placeholder pattern must have at least one character", call. = FALSE)
  }

  # eval(bquote(...)) bakes the lead character into the returned closure,
  # so its body prints as a self-contained expression.
  if (suffix == "") {
    # Anonymous placeholders: "?", "?", ...
    eval(bquote(
      function(n) rep(.(lead_char), n)
    ))
  } else if (suffix == "1") {
    # Numbered placeholders: "$1", "$2", ...
    eval(bquote(
      function(n) paste0(.(lead_char), seq_len(n))
    ))
  } else if (suffix == "name") {
    # Named placeholders ":a", ":b", ..., named after the letters used.
    eval(bquote(
      function(n) {
        nm <- letters[seq_len(n)]
        stats::setNames(paste0(.(lead_char), nm), nm)
      }
    ))
  } else {
    stop(
      "Pattern must be any character, optionally followed by 1 or name. Examples: $1, :name",
      call. = FALSE
    )
  }
}
# Sample strings in several scripts and encodings, used to exercise
# character round-trips through DBI backends.

# "Кирилл" (Cyrillic script)
text_cyrillic <- "\u041a\u0438\u0440\u0438\u043b\u043b"

# "Müller": once as UTF-8, once re-encoded to latin1
text_latin <- "M\u00fcller"
text_latin_encoded <- iconv(text_latin, from = "UTF-8", to = "latin1")

# "我是谁" (Chinese script)
text_chinese <- "\u6211\u662f\u8c01"

# Plain ASCII; iconv() normalizes the encoding mark
text_ascii <- iconv("ASCII", to = "ASCII")

# All test strings, in a fixed order
texts <- c(text_cyrillic, text_latin, text_latin_encoded, text_chinese, text_ascii)
#' @format NULL
#' @importFrom withr with_output_sink
#' @section Connection:
#' \subsection{Stress tests}{
spec_stress_connection <- list(
  #' Open 50 simultaneous connections
  simultaneous_connections = function(ctx) {
    cons <- list()
    # Close everything opened so far, even if a later connect() fails.
    on.exit(try_silent(lapply(cons, dbDisconnect)), add = TRUE)
    for (idx in seq_len(50L)) {
      cons[[length(cons) + 1L]] <- connect(ctx)
    }
    is_connection <- vapply(cons, is, class2 = "DBIConnection", logical(1))
    expect_true(all(is_connection))
  },

  #' Open and close 50 connections
  stress_connections = function(ctx) {
    for (idx in seq_len(50L)) {
      con <- connect(ctx)
      expect_s4_class(con, "DBIConnection")
      expect_error(dbDisconnect(con), NA)
    }
  },
  #' }

  NULL
)
for (row.names in list(FALSE, NULL)) { table_name <- random_table_name() local_remove_test_table(con, table_name) mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = TRUE) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) } }, # read_table_row_names_true_exists = function(con, table_name) { #' - If `TRUE`, a column named "row_names" is converted to row names. row.names <- TRUE mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = NA) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_equal_df(mtcars_out, mtcars_in) }, #' #' @section Failure modes: #' An error is raised if `row.names` is `TRUE` and no "row_names" column exists, read_table_row_names_true_missing = function(ctx, con, table_name) { row.names <- TRUE penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = NA) expect_error(dbReadTable(con, table_name, row.names = row.names)) }, # read_table_row_names_na_exists = function(con, table_name) { #' @return #' - If `NA`, a column named "row_names" is converted to row names if it exists, row.names <- NA mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = TRUE) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_equal_df(mtcars_out, mtcars_in) }, # read_table_row_names_na_missing = function(ctx, con, table_name) { #' otherwise no translation occurs. 
row.names <- NA penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = FALSE) penguins_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_equal_df(penguins_out, penguins_in) }, # read_table_row_names_string_exists = function(con, table_name) { #' - If a string, this specifies the name of the column in the remote table #' that contains the row names. row.names <- "make_model" mtcars_in <- datasets::mtcars mtcars_in$make_model <- rownames(mtcars_in) mtcars_in <- unrowname(mtcars_in) dbWriteTable(con, table_name, mtcars_in, row.names = FALSE) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_false("make_model" %in% names(mtcars_out)) expect_true(all(mtcars_in$make_model %in% rownames(mtcars_out))) expect_true(all(rownames(mtcars_out) %in% mtcars_in$make_model)) expect_equal_df(unrowname(mtcars_out), mtcars_in[names(mtcars_in) != "make_model"]) }, #' #' @section Failure modes: #' An error is raised if `row.names` is set to a string and and no corresponding column exists. read_table_row_names_string_missing = function(ctx, con, table_name) { row.names <- "missing" penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = FALSE) expect_error(dbReadTable(con, table_name, row.names = row.names)) }, read_table_row_names_default = function(con, table_name) { #' @return #' The default is `row.names = FALSE`. 
#' mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = TRUE) mtcars_out <- check_df(dbReadTable(con, table_name)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) }, # read_table_check_names = function(ctx, con, table_name) { #' #' If the database supports identifiers with special characters, if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } #' the columns in the returned data frame are converted to valid R #' identifiers test_in <- data.frame(a = 1:3, b = 4:6) names(test_in) <- c("with spaces", "with,comma") dbWriteTable(con, table_name, test_in) #' if the `check.names` argument is `TRUE`, test_out <- check_df(dbReadTable(con, table_name, check.names = TRUE)) expect_identical(names(test_out), make.names(names(test_out), unique = TRUE)) expect_equal_df(test_out, setNames(test_in, names(test_out))) }, # read_table_check_names_false = function(ctx, con, table_name) { if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } #' If `check.names = FALSE`, the returned table has non-syntactic column names without quotes. test_in <- data.frame(a = 1:3, b = 4:6) names(test_in) <- c("with spaces", "with,comma") dbWriteTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, table_name, check.names = FALSE)) expect_equal_df(test_out, test_in) }, #' #' @section Failure modes: #' An error is raised when calling this method for a closed read_table_closed_connection = function(ctx, con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_closed_connection(ctx = ctx) expect_error(dbReadTable(con2, table_name)) }, #' or invalid connection. 
read_table_invalid_connection = function(ctx, con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_invalid_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, #' An error is raised read_table_error = function(ctx, con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L)) #' if `name` cannot be processed with [dbQuoteIdentifier()] expect_error(dbReadTable(con, NA)) #' or if this results in a non-scalar. expect_error(dbReadTable(con, c(table_name, table_name))) #' Unsupported values for `row.names` and `check.names` #' (non-scalars, expect_error(dbReadTable(con, table_name, row.names = letters)) #' unsupported data types, expect_error(dbReadTable(con, table_name, row.names = list(1L))) expect_error(dbReadTable(con, table_name, check.names = 1L)) #' `NA` for `check.names`) expect_error(dbReadTable(con, table_name, check.names = NA)) #' also raise an error. }, #' @section Additional arguments: #' The following arguments are not part of the `dbReadTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `row.names` (default: `FALSE`) #' - `check.names` #' #' They must be provided as named arguments. #' See the "Value" section for details on their usage. 
#' @section Specification: #' The `name` argument is processed as follows, read_table_name = function(ctx, con) { #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) #' - If an unquoted table name as string: `dbReadTable()` will do the #' quoting, test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) } }, # NULL ) DBItest/R/spec-getting-started.R0000644000176200001440000000165514157037563016153 0ustar liggesusers#' spec_getting_started #' @family getting specifications #' @usage NULL #' @format NULL #' @importFrom desc desc_get_deps #' @keywords NULL #' @section Definition: spec_getting_started <- list( package_dependencies = function(ctx) { #' A DBI backend is an R package pkg_path <- get_pkg_path(ctx) pkg_deps_df <- desc_get_deps(pkg_path) pkg_imports <- pkg_deps_df[pkg_deps_df[["type"]] == "Imports", ][["package"]] #' which imports the \pkg{DBI} expect_true("DBI" %in% pkg_imports) #' and \pkg{methods} expect_true("methods" %in% pkg_imports) #' packages. }, # package_name = function(ctx) { pkg_name <- package_name(ctx) #' For better or worse, the names of many existing backends start with #' \sQuote{R}, e.g., \pkg{RSQLite}, \pkg{RMySQL}, \pkg{RSQLServer}; it is up #' to the backend author to adopt this convention or not. 
#' @name test_all
#' @aliases NULL
#' @section Tests:
#' [test_compliance()]:
#' Test full compliance to DBI
NULL

#' Test full compliance to DBI
#'
#' Runs the compliance specification against the backend described by `ctx`.
#'
#' @inheritParams test_all
#' @include test-transaction.R
#' @family tests
#' @export
test_compliance <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) {
  run_tests(ctx, spec_compliance, skip, run_only, test_suite = "Full compliance")
}
#' #' This generic expects an arbitrary object as second argument. #' To query the values returned by the default implementation, #' run `example(dbDataType, package = "DBI")`. #' If the backend needs to override this generic, #' it must accept all basic R data types as its second argument, namely expect_has_data_type <- function(value) { eval(bquote( expect_error(check_data_type(.(value)), NA) )) } expected_data_types <- list( #' [logical], logical(1), #' [integer], integer(1), #' [numeric], numeric(1), #' [character], character(1), #' dates (see [Dates]), Sys.Date(), #' date-time (see [DateTimeClasses]), Sys.time(), #' and [difftime]. Sys.time() - Sys.time(), #' If the database supports blobs, if (!isTRUE(ctx$tweaks$omit_blob_tests)) { #' this method also must accept lists of [raw] vectors, list(as.raw(0:10)) }, if (!isTRUE(ctx$tweaks$omit_blob_tests)) { #' and [blob::blob] objects. blob::blob(as.raw(0:10)) } ) lapply( compact(expected_data_types), expect_has_data_type ) expect_has_data_type(data.frame(a = 1, b = "2", stringsAsFactors = FALSE)) #' As-is objects (i.e., wrapped by [I()]) must be #' supported and return the same results as their unwrapped counterparts. lapply( compact(expected_data_types), function(value) { if (!is.null(value)) { eval(bquote( expect_error( expect_identical( dbDataType(dbObj, I(.(value))), dbDataType(dbObj, .(value)) ), NA ) )) } } ) #' The SQL data type for [factor] expect_identical( dbDataType(dbObj, letters), dbDataType(dbObj, factor(letters)) ) #' and [ordered] is the same as for character. expect_identical( dbDataType(dbObj, letters), dbDataType(dbObj, ordered(letters)) ) #' The behavior for other object types is not specified. 
# The complete DBItest specification: all spec parts, in execution order.
spec_all <- c(
  spec_getting_started,
  spec_driver,
  spec_connection,
  spec_result,
  spec_sql,
  spec_meta,
  spec_transaction,
  spec_compliance,
  spec_stress
)
# Subset `tests` to the entries whose names fully match one of the
# `run_only` regular expressions.  A NULL `run_only` keeps every test.
get_run_only_tests <- function(tests, run_only) {
  if (is.null(run_only)) {
    return(tests)
  }

  # Only named tests can be selected by pattern.
  test_names <- names(tests)
  test_names <- test_names[test_names != ""]

  # Anchor each pattern so it must match a complete test name.
  anchored <- paste0("(?:^(?:", run_only, ")$)")
  match_flags <- lapply(anchored, grepl, test_names, perl = TRUE)
  selected <- test_names[Reduce(`|`, match_flags)]

  tests[selected]
}
liggesusers#' spec_sql_exists_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_exists_table <- list( exists_table_formals = function() { # expect_equal(names(formals(dbExistsTable)), c("conn", "name", "...")) }, #' @return #' `dbExistsTable()` returns a logical scalar, `TRUE` if the table or view #' specified by the `name` argument exists, `FALSE` otherwise. exists_table = function(ctx, con, table_name = "dbit05") { expect_false(expect_visible(dbExistsTable(con, table_name))) penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_true(expect_visible(dbExistsTable(con, table_name))) }, # second stage exists_table = function(ctx, con) { table_name <- "dbit05" expect_false(expect_visible(dbExistsTable(con, table_name))) }, #' #' This includes temporary tables if supported by the database. exists_table_temporary = function(ctx, con, table_name) { expect_false(expect_visible(dbExistsTable(con, table_name))) if (isTRUE(ctx$tweaks$temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) expect_true(expect_visible(dbExistsTable(con, table_name))) } }, #' #' @section Failure modes: #' An error is raised when calling this method for a closed exists_table_closed_connection = function(ctx, closed_con) { expect_error(dbExistsTable(closed_con, "test")) }, #' or invalid connection. exists_table_invalid_connection = function(ctx, invalid_con) { expect_error(dbExistsTable(invalid_con, "test")) }, #' An error is also raised exists_table_error = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L)) #' if `name` cannot be processed with [dbQuoteIdentifier()] expect_error(dbExistsTable(con, NA)) #' or if this results in a non-scalar. 
expect_error(dbExistsTable(con, c(table_name, table_name))) }, #' @section Specification: #' The `name` argument is processed as follows, exists_table_name = function(ctx, con) { #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) expect_false(dbExistsTable(con, table_name)) test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) #' - If an unquoted table name as string: `dbExistsTable()` will do the #' quoting, expect_true(dbExistsTable(con, table_name)) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done expect_true(dbExistsTable(con, dbQuoteIdentifier(con, table_name))) } }, #' #' For all tables listed by [dbListTables()], `dbExistsTable()` returns `TRUE`. exists_table_list = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1)) for (table_name in dbListTables(con)) { eval(bquote(expect_true(dbExistsTable(con, .(table_name))))) } }, # NULL ) DBItest/R/tweaks.R0000644000176200001440000001415014157034343013377 0ustar liggesusers#' Tweaks for DBI tests #' #' The tweaks are a way to control the behavior of certain tests. Currently, #' you need to search the \pkg{DBItest} source code to understand which tests #' are affected by which tweaks. This function is usually called to set the #' `tweaks` argument in a [make_context()] call. #' #' @name tweaks #' @aliases NULL #' @examples #' \dontrun{ #' make_context(..., tweaks = tweaks(strict_identifier = TRUE)) #' } { # nolint tweak_names <- alist( #' @param ... `[any]`\cr #' Unknown tweaks are accepted, with a warning. The ellipsis #' also makes sure that you only can pass named arguments. "..." 
= , #' @param constructor_name `[character(1)]`\cr #' Name of the function that constructs the `Driver` object. "constructor_name" = NULL, #' @param constructor_relax_args `[logical(1)]`\cr #' If `TRUE`, allow a driver constructor with default values for all #' arguments; otherwise, require a constructor with empty argument list #' (default). "constructor_relax_args" = FALSE, #' @param strict_identifier `[logical(1)]`\cr #' Set to `TRUE` if the DBMS does not support arbitrarily-named #' identifiers even when quoting is used. "strict_identifier" = FALSE, #' @param omit_blob_tests `[logical(1)]`\cr #' Set to `TRUE` if the DBMS does not support a `BLOB` data #' type. "omit_blob_tests" = FALSE, #' @param current_needs_parens `[logical(1)]`\cr #' Set to `TRUE` if the SQL functions `current_date`, #' `current_time`, and `current_timestamp` require parentheses. "current_needs_parens" = FALSE, #' @param union `[function(character)]`\cr #' Function that combines several subqueries into one so that the #' resulting query returns the concatenated results of the subqueries "union" = function(x) paste(x, collapse = " UNION "), #' @param placeholder_pattern `[character]`\cr #' A pattern for placeholders used in [dbBind()], e.g., #' `"?"`, `"$1"`, or `":name"`. See #' [make_placeholder_fun()] for details. "placeholder_pattern" = NULL, #' @param logical_return `[function(logical)]`\cr #' A vectorized function that converts logical values to the data type #' returned by the DBI backend. "logical_return" = identity, #' @param date_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a date value. "date_cast" = function(x) paste0("date('", x, "')"), #' @param time_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a time value. 
"time_cast" = function(x) paste0("time('", x, "')"), #' @param timestamp_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a timestamp value. "timestamp_cast" = function(x) paste0("timestamp('", x, "')"), #' @param blob_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a blob value. "blob_cast" = identity, #' @param date_typed `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support a dedicated type for dates. "date_typed" = TRUE, #' @param time_typed `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support a dedicated type for times. "time_typed" = TRUE, #' @param timestamp_typed `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support a dedicated type for #' timestamps. "timestamp_typed" = TRUE, #' @param temporary_tables `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support temporary tables. "temporary_tables" = TRUE, #' @param list_temporary_tables `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support listing temporary tables. "list_temporary_tables" = TRUE, #' @param is_null_check `[function(character)]`\cr #' A vectorized function that creates an SQL expression for checking if a #' value is `NULL`. "is_null_check" = function(x) paste0("(", x, " IS NULL)"), #' @param create_table_as `[function(character(1), character(1))]`\cr #' A function that creates an SQL expression for creating a table #' from an SQL expression. "create_table_as" = function(table_name, query) paste0("CREATE TABLE ", table_name, " AS ", query), #' @param dbitest_version `[character(1)]`\cr #' Compatible DBItest version, default: "1.7.1". "dbitest_version" = "1.7.1", # Dummy argument NULL ) } # A helper function that constructs the tweaks() function in a DRY fashion. 
#' @export
format.DBItest_tweaks <- function(x, ...) {
  # Character representation of a tweaks object: a header line followed by
  # one indented "name: value" line per tweak.
  if (length(x) == 0L) {
    return("DBItest tweaks: Empty")
  }

  entry_lines <- Map(
    function(nm, val) paste0("  ", nm, ": ", format(val)[[1]]),
    names(x),
    unclass(x)
  )
  c("DBItest tweaks:", unlist(entry_lines))
}
= FALSE) } NextMethod() } DBItest/R/spec-sql-list-objects.R0000644000176200001440000001350714157037563016244 0ustar liggesusers#' spec_sql_list_objects #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_list_objects <- list( list_objects_formals = function() { # expect_equal(names(formals(dbListObjects)), c("conn", "prefix", "...")) }, #' @return #' `dbListObjects()` list_objects = function(ctx, con, table_name = "dbit06") { objects <- dbListObjects(con) #' returns a data frame expect_is(objects, "data.frame") #' with columns cols <- c("table", "is_prefix") #' `table` and `is_prefix` (in that order), expect_equal(names(objects)[seq_along(cols)], cols) #' optionally with other columns with a dot (`.`) prefix. expect_true(all(grepl("^[.]", names(objects)[-seq_along(cols)]))) #' The `table` column is of type list. expect_equal(typeof(objects$table), "list") #' Each object in this list is suitable for use as argument in [dbQuoteIdentifier()]. expect_error(lapply(objects$table, dbQuoteIdentifier, conn = con), NA) #' The `is_prefix` column is a logical. expect_is(objects$is_prefix, "logical") #' This data frame contains one row for each object (schema, table expect_false(table_name %in% objects) #' and view) # TODO #' accessible from the prefix (if passed) or from the global namespace #' (if prefix is omitted). #' Tables added with [dbWriteTable()] penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) #' are part of the data frame. objects <- dbListObjects(con) quoted_tables <- vapply(objects$table, dbQuoteIdentifier, conn = con, character(1)) expect_true(dbQuoteIdentifier(con, table_name) %in% quoted_tables) }, # second stage list_objects = function(ctx, con) { #' As soon a table is removed from the database, #' it is also removed from the data frame of database objects. 
    table_name <- "dbit06"
    objects <- dbListObjects(con)
    # Quote each listed table object so it can be compared against the
    # quoted name of the table that was dropped in the first stage.
    quoted_tables <- vapply(objects$table, dbQuoteIdentifier, conn = con, character(1))
    expect_false(dbQuoteIdentifier(con, table_name) %in% quoted_tables)
  },

  #'
  #' The same applies to temporary objects if supported by the database.
  list_objects_temporary = function(ctx, con, table_name) {
    # Only meaningful when the backend both supports temporary tables and
    # includes them in listings (two independent tweaks).
    if (isTRUE(ctx$tweaks$temporary_tables) && isTRUE(ctx$tweaks$list_temporary_tables)) {
      dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE)
      objects <- dbListObjects(con)
      quoted_tables <- vapply(objects$table, dbQuoteIdentifier, conn = con, character(1))
      expect_true(dbQuoteIdentifier(con, table_name) %in% quoted_tables)
    }
  },

  #'
  #' The returned names are suitable for quoting with `dbQuoteIdentifier()`.
  list_objects_quote = function(ctx, con) {
    if (isTRUE(ctx$tweaks$strict_identifier)) {
      table_names <- "a"
    } else {
      # Exercise identifiers that require quoting (space, comma).
      table_names <- c("a", "with spaces", "with,comma")
    }
    for (table_name in table_names) {
      local_remove_test_table(con, table_name)
      dbWriteTable(con, dbQuoteIdentifier(con, table_name), data.frame(a = 2L))
      objects <- dbListObjects(con)
      quoted_tables <- vapply(objects$table, dbQuoteIdentifier, conn = con, character(1))
      expect_true(dbQuoteIdentifier(con, table_name) %in% quoted_tables)
    }
  },

  #'
  #' @section Failure modes:
  #' An error is raised when calling this method for a closed
  list_objects_closed_connection = function(ctx, closed_con) {
    expect_error(dbListObjects(closed_con))
  },
  #' or invalid connection.
  list_objects_invalid_connection = function(ctx, invalid_con) {
    expect_error(dbListObjects(invalid_con))
  },

  #' @section Specification:
  list_objects_features = function(ctx, con) {
    objects <- dbListObjects(con)
    #' The `table` object can be quoted with [dbQuoteIdentifier()].
    sql <- lapply(objects$table, dbQuoteIdentifier, conn = con)
    #' The result of quoting can be passed to [dbUnquoteIdentifier()].
    # vapply() with FUN.VALUE = list(1) asserts each unquote result is a
    # length-1 list and collects the unwrapped elements into a plain list.
    unquoted <- vapply(sql, dbUnquoteIdentifier, conn = con, list(1))
    #' The unquoted results are equal to the original `table` object.
    expect_equal(unquoted, unclass(objects$table))
    #' (For backends it may be convenient to use the [Id] class, but this is
    #' not required.)
    #'
    #' The `prefix` column indicates if the `table` value refers to a table
    #' or a prefix.
    #' For a call with the default `prefix = NULL`, the `table`
    #' values that have `is_prefix == FALSE` correspond to the tables
    #' returned from [dbListTables()],
    non_prefix_objects <- vapply(
      objects$table[!objects$is_prefix],
      dbQuoteIdentifier,
      conn = con,
      character(1)
    )
    all_tables <- dbQuoteIdentifier(con, dbListTables(con))
    # Order of listings is backend-defined; compare as sorted sets.
    expect_equal(sort(non_prefix_objects), sort(as.character(all_tables)))
    if (!any(objects$is_prefix)) {
      skip("No schemas available")
    }
    #'
    #' Values in `table` column that have `is_prefix == TRUE` can be
    #' passed as the `prefix` argument to another call to `dbListObjects()`.
    #' For the data frame returned from a `dbListObject()` call with the
    #' `prefix` argument set, all `table` values where `is_prefix` is
    #' `FALSE` can be used in a call to [dbExistsTable()] which returns
    #' `TRUE`.
for (schema in utils::head(objects$table[objects$is_prefix])) { sub_objects <- dbListObjects(con, prefix = schema) for (sub_table in utils::head(sub_objects$table[!sub_objects$is_prefix])) { # HACK HACK HACK for RMariaDB on OS X (#188) if (!identical(sub_table, Id(schema = "information_schema", table = "FILES"))) { # eval(bquote()) preserves the SQL class, even if it's not apparent # in the output eval(bquote(expect_true( dbExistsTable(con, .(sub_table)), label = paste0("dbExistsTable(", dbQuoteIdentifier(con, sub_table), ")") ))) } } } }, # NULL ) DBItest/R/import-dbi.R0000644000176200001440000000145014157034343014146 0ustar liggesusers# The imports below were generated using the following call: # @import.gen::importFrom("DBI") #' @importFrom DBI dbAppendTable dbBegin dbBind dbBreak dbCallProc dbClearResult dbColumnInfo #' @importFrom DBI dbCommit dbConnect dbCreateTable dbDataType dbDisconnect #' @importFrom DBI dbExecute dbExistsTable dbFetch dbGetDBIVersion #' @importFrom DBI dbGetInfo dbGetQuery dbGetRowCount dbGetRowsAffected #' @importFrom DBI dbGetStatement dbHasCompleted dbIsValid #' @importFrom DBI dbListConnections dbListFields dbListObjects dbListTables #' @importFrom DBI dbQuoteIdentifier dbQuoteLiteral dbQuoteString dbReadTable dbRemoveTable #' @importFrom DBI dbRollback dbSendQuery dbSendStatement dbSetDataMappings #' @importFrom DBI dbUnquoteIdentifier dbWithTransaction dbWriteTable #' @importFrom DBI Id SQL NULL DBItest/R/spec-connection-disconnect.R0000644000176200001440000000203314157037563017323 0ustar liggesusers#' spec_connection_disconnect #' @family connection specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_connection_disconnect <- list( disconnect_formals = function() { # expect_equal(names(formals(dbDisconnect)), c("conn", "...")) }, #' @return can_disconnect = function(ctx) { con <- connect(ctx) #' `dbDisconnect()` returns `TRUE`, invisibly. 
expect_invisible_true(dbDisconnect(con)) }, #' #' @section Failure modes: #' A warning is issued on garbage collection when a connection has been #' released without calling `dbDisconnect()`, #' but this cannot be tested automatically. #' A warning is issued immediately when calling `dbDisconnect()` on an #' already disconnected disconnect_closed_connection = function(ctx, closed_con) { expect_warning(dbDisconnect(closed_con)) }, #' or invalid connection. disconnect_invalid_connection = function(ctx, invalid_con) { expect_warning(dbDisconnect(invalid_con)) }, # NULL ) DBItest/R/spec-meta-column-info.R0000644000176200001440000000647114157037563016221 0ustar liggesusers#' spec_meta_column_info #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_column_info <- list( column_info_formals = function() { # expect_equal(names(formals(dbColumnInfo)), c("res", "...")) }, #' @return #' `dbColumnInfo()` column_info = function(ctx, con, table_name) { penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) res <- local_result(dbSendQuery(con, paste0("SELECT * FROM ", table_name))) fields <- dbColumnInfo(res) #' returns a data frame expect_s3_class(fields, "data.frame") #' with at least two columns `"name"` and `"type"` (in that order) expect_equal(names(fields)[1:2], c("name", "type")) #' (and optional columns that start with a dot). expect_true(all(grepl("^[.]", names(fields)[-1:-2]))) #' The `"name"` and `"type"` columns contain the names and types #' of the R columns of the data frame that is returned from [`dbFetch()`]. penguins_ret <- dbFetch(res) expect_identical(fields$name, names(penguins_ret)) #' The `"type"` column is of type `character` and only for information. expect_type(fields$type, "character") #' Do not compute on the `"type"` column, instead use `dbFetch(res, n = 0)` #' to create a zero-row data frame initialized with the correct data types. 
}, #' #' @section Failure modes: #' An attempt to query columns for a closed result set raises an error. column_info_closed = function(con) { query <- trivial_query() res <- dbSendQuery(con, query) dbClearResult(res) expect_error(dbColumnInfo(res)) }, #' @section Specification: #' #' A column named `row_names` is treated like any other column. column_info_row_names = function(con, table_name) { dbWriteTable(con, table_name, data.frame(a = 1L, row_names = 2L)) res <- local_result(dbSendQuery(con, paste0("SELECT * FROM ", table_name))) expect_identical(dbColumnInfo(res)$name, c("a", "row_names")) }, #' column_info_consistent = function(ctx, con) { res <- local_result(dbSendQuery(con, "SELECT 1.5 AS a, 2.5 AS b")) #' The column names are always consistent info <- dbColumnInfo(res) #' with the data returned by `dbFetch()`. data <- dbFetch(res) expect_identical(info$name, names(data)) }, #' column_info_consistent_unnamed = function(ctx, con) { # odbc package if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } #' If the query returns unnamed columns, res <- local_result(dbSendQuery(con, "SELECT 1.5, 2.5 AS a, 1.5, 3.5")) info <- dbColumnInfo(res) data <- dbFetch(res) expect_identical(info$name, names(data)) expect_equal(data[["a"]], 2.5) #' non-empty and non-`NA` names are assigned. expect_false(anyNA(names(data))) expect_true(all(names(data) != "")) }, #' column_info_consistent_keywords = function(ctx, con) { #' Column names that correspond to SQL or R keywords are left unchanged. 
res <- local_result(dbSendQuery(con, paste0("SELECT 1.5 AS ", dbQuoteIdentifier(con, "for")))) info <- dbColumnInfo(res) data <- dbFetch(res) expect_identical(info$name, names(data)) expect_equal(data[["for"]], 1.5) }, # NULL ) DBItest/R/s4.R0000644000176200001440000000234514157034343012432 0ustar liggesusers# http://stackoverflow.com/a/39880324/946850 s4_methods <- function(env, pkg_fun = NULL) { generics <- methods::getGenerics(env) if (is.null(pkg_fun)) { ok <- TRUE } else { ok <- pkg_fun(generics@package) } res <- Map( generics@.Data[ok], generics@package[ok], USE.NAMES = TRUE, f = function(name, package) { what <- methods::methodsPackageMetaName("T", paste(name, package, sep = ":")) table <- get(what, envir = env) mget(ls(table, all.names = TRUE), envir = table) } ) unlist(res, recursive = FALSE) } s4_real_argument_names <- function(s4_method) { expect_s4_class(s4_method, "function") expect_s4_class(s4_method, "MethodDefinition") unwrapped <- s4_unwrap(s4_method) names(formals(unwrapped)) } s4_unwrap <- function(s4_method) { # Only unwrap if body is of the following form: # { # .local <- function(x, y, z, ...) { # ... # } # ... 
# } method_body <- body(s4_method) if (inherits(method_body, "{")) { local_def <- method_body[[2]] if (inherits(local_def, "<-") && local_def[[2]] == quote(.local)) { local_fun <- local_def[[3]] if (inherits(local_fun, "function")) { return(local_fun) } } } s4_method } DBItest/R/spec-sql-quote-identifier.R0000644000176200001440000001356614157037563017124 0ustar liggesusers#' spec_sql_quote_identifier #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_quote_identifier <- list( quote_identifier_formals = function() { # expect_equal(names(formals(dbQuoteIdentifier)), c("conn", "x", "...")) }, #' @return quote_identifier_return = function(con) { #' `dbQuoteIdentifier()` returns an object that can be coerced to [character], simple_out <- dbQuoteIdentifier(con, "simple") expect_error(as.character(simple_out), NA) expect_type(as.character(simple_out), "character") }, # quote_identifier_vectorized = function(ctx, con) { #' of the same length as the input. simple <- "simple" simple_out <- dbQuoteIdentifier(con, simple) expect_equal(length(simple_out), 1L) letters_out <- dbQuoteIdentifier(con, letters) expect_equal(length(letters_out), length(letters)) #' For an empty character vector this function returns a length-0 object. empty <- character() empty_out <- dbQuoteIdentifier(con, empty) expect_equal(length(empty_out), 0L) #' The names of the input argument are preserved in the output. unnamed <- letters unnamed_out <- dbQuoteIdentifier(con, unnamed) expect_null(names(unnamed_out)) named <- stats::setNames(LETTERS[1:3], letters[1:3]) named_out <- dbQuoteIdentifier(con, named) expect_equal(names(named_out), letters[1:3]) #' When passing the returned object again to `dbQuoteIdentifier()` #' as `x` #' argument, it is returned unchanged. 
expect_identical(dbQuoteIdentifier(con, simple_out), simple_out) expect_identical(dbQuoteIdentifier(con, letters_out), letters_out) expect_identical(dbQuoteIdentifier(con, empty_out), empty_out) #' Passing objects of class [SQL] should also return them unchanged. expect_identical(dbQuoteIdentifier(con, SQL(simple)), SQL(simple)) expect_identical(dbQuoteIdentifier(con, SQL(letters)), SQL(letters)) expect_identical(dbQuoteIdentifier(con, SQL(empty)), SQL(empty)) #' (For backends it may be most convenient to return [SQL] objects #' to achieve this behavior, but this is not required.) }, #' #' @section Failure modes: quote_identifier_error = function(ctx, con) { #' #' An error is raised if the input contains `NA`, expect_error(dbQuoteIdentifier(con, NA)) expect_error(dbQuoteIdentifier(con, NA_character_)) expect_error(dbQuoteIdentifier(con, c("a", NA_character_))) #' but not for an empty string. expect_error(dbQuoteIdentifier(con, ""), NA) }, #' @section Specification: #' Calling [dbGetQuery()] for a query of the format `SELECT 1 AS ...` #' returns a data frame with the identifier, unquoted, as column name. quote_identifier = function(ctx, con) { #' Quoted identifiers can be used as table and column names in SQL queries, simple <- dbQuoteIdentifier(con, "simple") #' in particular in queries like `SELECT 1 AS ...` query <- trivial_query(column = simple) rows <- check_df(dbGetQuery(con, query)) expect_identical(names(rows), "simple") expect_identical(unlist(unname(rows)), 1.5) #' and `SELECT * FROM (SELECT 1) ...`. query <- paste0("SELECT * FROM (", trivial_query(), ") ", simple) rows <- check_df(dbGetQuery(con, query)) expect_identical(unlist(unname(rows)), 1.5) }, #' The method must use a quoting mechanism that is unambiguously different #' from the quoting mechanism used for strings, so that a query like quote_identifier_string = function(ctx, con) { #' `SELECT ... 
FROM (SELECT 1 AS ...)` query <- paste0( "SELECT ", dbQuoteIdentifier(con, "b"), " FROM (", "SELECT 1 AS ", dbQuoteIdentifier(con, "a"), ")" ) #' throws an error if the column names do not match. eval(bquote(expect_error(dbGetQuery(con, .(query))))) }, # quote_identifier_special = function(ctx, con) { #' #' The method can quote column names that #' contain special characters such as a space, with_space_in <- "with space" with_space <- dbQuoteIdentifier(con, with_space_in) #' a dot, with_dot_in <- "with.dot" with_dot <- dbQuoteIdentifier(con, with_dot_in) #' a comma, with_comma_in <- "with,comma" with_comma <- dbQuoteIdentifier(con, with_comma_in) #' or quotes used to mark strings with_quote_in <- as.character(dbQuoteString(con, "a")) with_quote <- dbQuoteIdentifier(con, with_quote_in) #' or identifiers, empty_in <- "" empty <- dbQuoteIdentifier(con, empty_in) quoted_empty <- dbQuoteIdentifier(con, as.character(empty)) quoted_with_space <- dbQuoteIdentifier(con, as.character(with_space)) quoted_with_dot <- dbQuoteIdentifier(con, as.character(with_dot)) quoted_with_comma <- dbQuoteIdentifier(con, as.character(with_comma)) quoted_with_quote <- dbQuoteIdentifier(con, as.character(with_quote)) #' if the database supports this. if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } #' In any case, checking the validity of the identifier #' should be performed only when executing a query, #' and not by `dbQuoteIdentifier()`. 
query <- paste0( "SELECT ", "2.5 as", with_space, ",", "3.5 as", with_dot, ",", "4.5 as", with_comma, ",", "5.5 as", with_quote, ",", "6.5 as", quoted_empty, ",", "7.5 as", quoted_with_space, ",", "8.5 as", quoted_with_dot, ",", "9.5 as", quoted_with_comma, ",", "10.5 as", quoted_with_quote ) rows <- check_df(dbGetQuery(con, query)) expect_identical( names(rows), c( with_space_in, with_dot_in, with_comma_in, with_quote_in, as.character(empty), as.character(with_space), as.character(with_dot), as.character(with_comma), as.character(with_quote) ) ) expect_identical(unlist(unname(rows)), 2:10 + 0.5) }, # NULL ) DBItest/R/spec-result-clear-result.R0000644000176200001440000000314114157037563016754 0ustar liggesusers#' spec_result_clear_result #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_clear_result <- list( clear_result_formals = function() { # expect_equal(names(formals(dbClearResult)), c("res", "...")) }, #' @return #' `dbClearResult()` returns `TRUE`, invisibly, for result sets obtained from #' both `dbSendQuery()` clear_result_return_query = function(con) { res <- dbSendQuery(con, trivial_query()) expect_invisible_true(dbClearResult(res)) }, #' and `dbSendStatement()`. clear_result_return_statement = function(ctx, con, table_name) { res <- dbSendStatement(con, ctx$tweaks$create_table_as(table_name, "SELECT 1")) expect_invisible_true(dbClearResult(res)) }, #' #' @section Failure modes: #' An attempt to close an already closed result set issues a warning cannot_clear_result_twice_query = function(con) { res <- dbSendQuery(con, trivial_query()) dbClearResult(res) expect_warning(expect_invisible_true(dbClearResult(res))) }, #' in both cases. 
cannot_clear_result_twice_statement = function(ctx, con, table_name) { res <- dbSendStatement(con, ctx$tweaks$create_table_as(table_name, "SELECT 1")) dbClearResult(res) expect_warning(expect_invisible_true(dbClearResult(res))) }, #' @section Specification: #' `dbClearResult()` frees all resources associated with retrieving #' the result of a query or update operation. #' The DBI backend can expect a call to `dbClearResult()` for each #' [dbSendQuery()] or [dbSendStatement()] call. NULL ) DBItest/R/context.R0000644000176200001440000000441714157034343013572 0ustar liggesusers#' Test contexts #' #' Create a test context, set and query the default context. #' #' @param drv `[DBIConnector]`\cr #' An object of class [DBIConnector-class] that describes how to connect #' to the database. #' @param connect_args `[named list]`\cr Deprecated. #' @param set_as_default `[logical(1)]`\cr Should the created context be #' set as default context? #' @param tweaks `[DBItest_tweaks]`\cr Tweaks as constructed by the #' [tweaks()] function. #' @param ctx `[DBItest_context]`\cr A test context. #' @param name `[character]`\cr An optional name of the context which will #' be used in test messages. #' @param default_skip `[character]`\cr Default value of `skip` argument #' to [test_all()] and other testing functions. #' #' @return `[DBItest_context]`\cr A test context, for #' `set_default_context` the previous default context (invisibly) or #' `NULL`. 
#' #' @rdname context #' @importFrom methods is new #' @export #' @example examples/make_context.R make_context <- function(drv, connect_args = NULL, set_as_default = TRUE, tweaks = NULL, name = NULL, default_skip = NULL) { if (is.null(drv)) { abort("drv cannot be NULL.") } if (is(drv, "DBIDriver")) { if (is.null(connect_args)) { connect_args <- list() } cnr <- new("DBIConnector", .drv = drv, .conn_args = connect_args) } else if (is(drv, "DBIConnector")) { cnr <- drv drv <- cnr@.drv } else { abort("drv must be of class DBIDriver or DBIConnector.") } if (is.null(tweaks)) { tweaks <- tweaks() } ctx <- structure( list( cnr = cnr, drv = drv, tweaks = tweaks, name = name, default_skip = default_skip ), class = "DBItest_context" ) if (set_as_default) { set_default_context(ctx) } ctx } #' @rdname context #' @export set_default_context <- function(ctx) { old_ctx <- .ctx_env$default_context .ctx_env$default_context <- ctx invisible(old_ctx) } #' @rdname context #' @export get_default_context <- function() { .ctx_env$default_context } package_name <- function(ctx) { attr(class(ctx$drv), "package") } connect <- function(ctx, ...) { quos <- enquos(...) eval_tidy(quo(dbConnect(ctx$cnr, !!!quos))) } .ctx_env <- new.env(parent = emptyenv()) set_default_context(NULL) DBItest/R/spec-meta-bind-tester-extra.R0000644000176200001440000000101014157034343017305 0ustar liggesusersBindTesterExtra <- R6::R6Class( "BindTesterExtra", portable = TRUE, # public = list( check_return_value = function(bind_res, res) invisible(NULL), patch_bind_values = identity, bind_error = function() NA, requires_names = function() NA, is_repeated = function() FALSE, is_premature_clear = function() FALSE, is_untouched = function() FALSE ) ) new_bind_tester_extra <- function(...) { R6::R6Class( inherit = BindTesterExtra, portable = TRUE, public = list(...) 
) } DBItest/R/spec-connection-get-info.R0000644000176200001440000000215414157037563016706 0ustar liggesusers#' spec_connection_get_info #' @family connection specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @rdname spec_get_info spec_connection_get_info <- list( #' @return #' For objects of class [DBIConnection-class], `dbGetInfo()` get_info_connection = function(con) { info <- dbGetInfo(con) #' returns a named list expect_type(info, "list") info_names <- names(info) #' that contains at least the following components: #' necessary_names <- c( #' - `db.version`: version of the database server, "db.version", #' - `dbname`: database name, "dbname", #' - `username`: username to connect to the database, "username", #' - `host`: hostname of the database server, "host", #' - `port`: port on the database server. "port" ) for (name in necessary_names) { eval(bquote( expect_true(.(name) %in% info_names) )) } #' It must not contain a `password` component. expect_false("password" %in% info_names) #' Components that are not applicable should be set to `NA`. 
}, # NULL ) DBItest/R/spec-meta.R0000644000176200001440000000044314157034343013757 0ustar liggesusers#' @format NULL spec_meta <- c( spec_meta_bind, spec_meta_is_valid, spec_meta_has_completed, spec_meta_get_statement, spec_meta_column_info, spec_meta_get_row_count, spec_meta_get_rows_affected, spec_meta_get_info_result, # no 64-bit or time input data type yet NULL ) DBItest/R/spec-result-send-statement.R0000644000176200001440000001173414157037563017314 0ustar liggesusers#' spec_result_send_statement #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_send_statement <- list( send_statement_formals = function() { # expect_equal(names(formals(dbSendStatement)), c("conn", "statement", "...")) }, #' @return #' `dbSendStatement()` returns send_statement_trivial = function(ctx, con, table_name) { res <- expect_visible(dbSendStatement(con, trivial_statement(ctx, table_name))) #' an S4 object that inherits from [DBIResult-class]. expect_s4_class(res, "DBIResult") #' The result set can be used with [dbGetRowsAffected()] to #' determine the number of rows affected by the query. expect_error(dbGetRowsAffected(res), NA) #' Once you have finished using a result, make sure to clear it #' with [dbClearResult()]. dbClearResult(res) }, #' #' @section Failure modes: #' An error is raised when issuing a statement over a closed send_statement_closed_connection = function(ctx, closed_con) { table_name <- "dbit10" expect_error(dbSendStatement(closed_con, trivial_statement(ctx, table_name = table_name))) }, #' or invalid connection, send_statement_invalid_connection = function(ctx, invalid_con) { table_name <- "dbit11" expect_error(dbSendStatement(invalid_con, trivial_statement(ctx, table_name = table_name))) }, #' or if the statement is not a non-`NA` string. 
send_statement_non_string = function(con) { expect_error(dbSendStatement(con, character())) expect_error(dbSendStatement(con, letters)) expect_error(dbSendStatement(con, NA_character_)) }, #' An error is also raised if the syntax of the query is invalid #' and all query parameters are given (by passing the `params` argument) #' or the `immediate` argument is set to `TRUE`. #' #' @section Failure modes: send_statement_syntax_error = function(con) { expect_error(dbSendStatement(con, "CREATTE", params = list())) expect_error(dbSendStatement(con, "CREATTE", immediate = TRUE)) }, #' @section Specification: send_statement_result_valid = function(ctx, con, table_name) { #' No warnings occur under normal conditions. expect_warning(res <- dbSendStatement(con, trivial_statement(ctx, table_name)), NA) #' When done, the DBIResult object must be cleared with a call to #' [dbClearResult()]. dbClearResult(res) }, # send_statement_stale_warning = function(ctx) { #' Failure to clear the result set leads to a warning #' when the connection is closed. con <- connect(ctx) on.exit(dbDisconnect(con)) expect_warning(dbSendStatement(con, trivial_query()), NA) expect_warning({ dbDisconnect(con) gc() }) on.exit(NULL) }, #' If the backend supports only one open result set per connection, send_statement_only_one_result_set = function(ctx, con, table_name) { res1 <- dbSendStatement(con, trivial_statement(ctx, table_name)) other_table_name <- random_table_name() local_remove_test_table(con, other_table_name) #' issuing a second query invalidates an already open result set #' and raises a warning. query <- ctx$tweaks$create_table_as(other_table_name, "SELECT 1 AS a") expect_warning(res2 <- dbSendStatement(con, query)) expect_false(dbIsValid(res1)) #' The newly opened result set is valid expect_true(dbIsValid(res2)) #' and must be cleared with `dbClearResult()`. 
dbClearResult(res2) }, #' @section Additional arguments: #' The following arguments are not part of the `dbSendStatement()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. #' @section Specification: #' #' The `param` argument allows passing query parameters, see [dbBind()] for details. send_statement_params = function(ctx, con) { placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { table_name <- random_table_name() local_remove_test_table(con, table_name) dbWriteTable(con, table_name, data.frame(a = as.numeric(1:3))) placeholder <- placeholder_fun(1) query <- paste0("DELETE FROM ", table_name, " WHERE a > ", placeholder) values <- 1.5 params <- stats::setNames(list(values), names(placeholder)) rs <- dbSendStatement(con, query, params = params) expect_equal(dbGetRowsAffected(rs), 2, info = placeholder) dbClearResult(rs) } }, #' @inheritSection spec_result_get_query Specification for the `immediate` argument send_statement_immediate = function(ctx, con, table_name) { res <- expect_visible(dbSendStatement(con, trivial_statement(ctx, table_name), immediate = TRUE)) expect_s4_class(res, "DBIResult") expect_error(dbGetRowsAffected(res), NA) dbClearResult(res) }, # NULL ) DBItest/R/spec-result-send-query.R0000644000176200001440000001035614157037563016454 0ustar liggesusers#' spec_result_send_query #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_send_query <- list( send_query_formals = function() { # expect_equal(names(formals(dbSendQuery)), c("conn", "statement", "...")) }, #' @return #' `dbSendQuery()` returns send_query_trivial = function(con) { res <- expect_visible(dbSendQuery(con, trivial_query())) #' an S4 object that inherits from [DBIResult-class]. 
expect_s4_class(res, "DBIResult") #' The result set can be used with [dbFetch()] to extract records. expect_equal(check_df(dbFetch(res))[[1]], 1.5) #' Once you have finished using a result, make sure to clear it #' with [dbClearResult()]. dbClearResult(res) }, #' #' @section Failure modes: #' An error is raised when issuing a query over a closed send_query_closed_connection = function(ctx, closed_con) { expect_error(dbSendQuery(closed_con, trivial_query())) }, #' or invalid connection, send_query_invalid_connection = function(ctx, invalid_con) { expect_error(dbSendQuery(invalid_con, trivial_query())) }, #' or if the query is not a non-`NA` string. send_query_non_string = function(con) { expect_error(dbSendQuery(con, character())) expect_error(dbSendQuery(con, letters)) expect_error(dbSendQuery(con, NA_character_)) }, #' An error is also raised if the syntax of the query is invalid #' and all query parameters are given (by passing the `params` argument) #' or the `immediate` argument is set to `TRUE`. #' #' @section Failure modes: send_query_syntax_error = function(con) { expect_error(dbSendQuery(con, "SELLECT", params = list())) expect_error(dbSendQuery(con, "SELLECT", immediate = TRUE)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbSendQuery()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. #' @section Specification: send_query_result_valid = function(con) { #' No warnings occur under normal conditions. expect_warning(res <- dbSendQuery(con, trivial_query()), NA) #' When done, the DBIResult object must be cleared with a call to #' [dbClearResult()]. 
dbClearResult(res) }, # send_query_stale_warning = function(ctx) { #' Failure to clear the result set leads to a warning #' when the connection is closed. con <- connect(ctx) on.exit(dbDisconnect(con)) expect_warning(dbSendQuery(con, trivial_query()), NA) expect_warning({ dbDisconnect(con) gc() }) on.exit(NULL) }, #' #' If the backend supports only one open result set per connection, send_query_only_one_result_set = function(con) { res1 <- dbSendQuery(con, trivial_query()) #' issuing a second query invalidates an already open result set #' and raises a warning. expect_warning(res2 <- dbSendQuery(con, "SELECT 2")) expect_false(dbIsValid(res1)) #' The newly opened result set is valid expect_true(dbIsValid(res2)) #' and must be cleared with `dbClearResult()`. dbClearResult(res2) }, #' #' The `param` argument allows passing query parameters, see [dbBind()] for details. send_query_params = function(ctx, con) { placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { placeholder <- placeholder_fun(1) query <- paste0("SELECT ", placeholder, " + 1.0 AS a") values <- trivial_values(3) - 1 params <- stats::setNames(list(values), names(placeholder)) rs <- dbSendQuery(con, query, params = params) ret <- dbFetch(rs) expect_equal(ret, trivial_df(3), info = placeholder) dbClearResult(rs) } }, #' @inheritSection spec_result_get_query Specification for the `immediate` argument send_query_immediate = function(con, table_name) { res <- expect_visible(dbSendQuery(con, trivial_query(), immediate = TRUE)) expect_s4_class(res, "DBIResult") expect_error(dbGetRowsAffected(res), NA) dbClearResult(res) }, # NULL ) DBItest/R/spec-sql-list-tables.R0000644000176200001440000000466414157037563016071 0ustar liggesusers#' spec_sql_list_tables #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_list_tables <- list( list_tables_formals = function() { # expect_equal(names(formals(dbListTables)), c("conn", "...")) }, #' @return #' 
`dbListTables()` list_tables = function(ctx, con, table_name = "dbit07") { tables <- dbListTables(con) #' returns a character vector expect_type(tables, "character") #' that enumerates all tables expect_false(table_name %in% tables) #' and views # TODO #' in the database. #' Tables added with [dbWriteTable()] penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) #' are part of the list. tables <- dbListTables(con) expect_true(table_name %in% tables) }, # second stage list_tables = function(ctx, con) { #' As soon as a table is removed from the database, #' it is also removed from the list of database tables. table_name <- "dbit07" tables <- dbListTables(con) expect_false(table_name %in% tables) }, #' #' The same applies to temporary tables if supported by the database. list_tables_temporary = function(ctx, con, table_name) { if (isTRUE(ctx$tweaks$temporary_tables) && isTRUE(ctx$tweaks$list_temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) tables <- dbListTables(con) expect_true(table_name %in% tables) } }, #' #' The returned names are suitable for quoting with `dbQuoteIdentifier()`. list_tables_quote = function(ctx, con) { if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) dbWriteTable(con, dbQuoteIdentifier(con, table_name), data.frame(a = 2L)) tables <- dbListTables(con) expect_true(table_name %in% tables) expect_true(dbQuoteIdentifier(con, table_name) %in% dbQuoteIdentifier(con, tables)) } }, #' #' @section Failure modes: #' An error is raised when calling this method for a closed list_tables_closed_connection = function(ctx, closed_con) { expect_error(dbListTables(closed_con)) }, #' or invalid connection.
list_tables_invalid_connection = function(ctx, invalid_con) { expect_error(dbListTables(invalid_con)) }, # NULL ) DBItest/R/spec-meta-is-valid.R0000644000176200001440000000445614157037563015504 0ustar liggesusers#' spec_meta_is_valid #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_is_valid <- list( is_valid_formals = function() { # expect_equal(names(formals(dbIsValid)), c("dbObj", "...")) }, #' @return #' `dbIsValid()` returns a logical scalar, #' `TRUE` if the object specified by `dbObj` is valid, #' `FALSE` otherwise. is_valid_connection = function(ctx) { con <- connect(ctx) #' A [DBIConnection-class] object is initially valid, expect_true(expect_visible(dbIsValid(con))) expect_error(dbDisconnect(con), NA) #' and becomes invalid after disconnecting with [dbDisconnect()]. expect_false(expect_visible(dbIsValid(con))) }, # is_valid_stale_connection = function(ctx, invalid_con) { #' For an invalid connection object (e.g., for some drivers if the object #' is saved to a file and then restored), the method also returns `FALSE`. expect_false(expect_visible(dbIsValid(invalid_con))) }, # is_valid_result_query = function(con) { query <- trivial_query() res <- dbSendQuery(con, query) #' A [DBIResult-class] object is valid after a call to [dbSendQuery()], expect_true(expect_visible(dbIsValid(res))) expect_error(dbFetch(res), NA) #' and stays valid even after all rows have been fetched; expect_true(expect_visible(dbIsValid(res))) dbClearResult(res) #' only clearing it with [dbClearResult()] invalidates it. 
expect_false(dbIsValid(res)) }, # is_valid_result_statement = function(con, table_name) { query <- paste0("CREATE TABLE ", table_name, " (a ", dbDataType(con, 1L), ")") res <- dbSendStatement(con, query) #' A [DBIResult-class] object is also valid after a call to [dbSendStatement()], expect_true(expect_visible(dbIsValid(res))) #' and stays valid after querying the number of rows affected; expect_error(dbGetRowsAffected(res), NA) expect_true(expect_visible(dbIsValid(res))) dbClearResult(res) #' only clearing it with [dbClearResult()] invalidates it. expect_false(dbIsValid(res)) }, #' If the connection to the database system is dropped (e.g., due to #' connectivity problems, server failure, etc.), `dbIsValid()` should return #' `FALSE`. This is not tested automatically. NULL ) DBItest/R/spec-result.R0000644000176200001440000000237214157034343014352 0ustar liggesusers#' @format NULL spec_result <- c( spec_result_send_query, spec_result_fetch, spec_result_clear_result, spec_result_get_query, spec_result_send_statement, spec_result_execute, spec_result_create_table_with_data_type, spec_result_roundtrip ) # Helpers ----------------------------------------------------------------- union <- function(..., .order_by = NULL, .ctx) { query <- .ctx$tweaks$union(c(...)) if (!missing(.order_by)) { query <- paste(query, "ORDER BY", .order_by) } query } trivial_statement <- function(ctx, table_name) { ctx$tweaks$create_table_as(table_name, trivial_query()) } trivial_query <- function(n = 1L, column = "a", .order_by = NULL, .ctx = NULL) { value <- trivial_values(n) if (length(column) == n) { query <- paste0("SELECT ", paste0(value, " AS ", column, collapse = ", ")) } else { query <- union( paste0("SELECT ", value, " AS ", column), .order_by = .order_by, .ctx = .ctx ) } query } trivial_values <- function(n = 1L) { seq_len(n) + 0.5 } trivial_df <- function(n = 1L, column = "a") { values <- trivial_values(n) if (length(column) == 1) { df <- data.frame(a = values) } else { df <- 
as.data.frame(as.list(values)) } names(df) <- column df } DBItest/R/test-stress.R0000644000176200001440000000061314157034355014403 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_stress()]: #' Stress tests (not tested with `test_all`) NULL #' Stress tests #' #' @inheritParams test_all #' @include test-compliance.R #' @family tests #' @keywords internal #' @export test_stress <- function(skip = NULL, ctx = get_default_context()) { test_suite <- "Stress" run_tests(ctx, spec_stress, skip, test_suite) } DBItest/R/test-sql.R0000644000176200001440000000055314157034343013657 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_sql()]: #' Test SQL methods NULL #' Test SQL methods #' #' @inheritParams test_all #' @include test-result.R #' @family tests #' @export test_sql <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "SQL" run_tests(ctx, spec_sql, skip, run_only, test_suite) } DBItest/R/spec-meta-get-info-result.R0000644000176200001440000000241514157037563017011 0ustar liggesusers#' spec_meta_get_info_result #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @name spec_get_info spec_meta_get_info_result <- list( get_info_result = function(ctx, con) { #' @return #' For objects of class [DBIResult-class], `dbGetInfo()` res <- local_result(dbSendQuery(con, trivial_query())) info <- dbGetInfo(res) #' returns a named list expect_type(info, "list") info_names <- names(info) #' that contains at least the following components: #' necessary_names <- c( #' - `statement`: the statement used with [dbSendQuery()] or [dbExecute()], #' as returned by [dbGetStatement()], "statement", #' - `row.count`: the number of rows fetched so far (for queries), #' as returned by [dbGetRowCount()], "row.count", #' - `rows.affected`: the number of rows affected (for statements), #' as returned by [dbGetRowsAffected()] "rows.affected", #' - `has.completed`: a logical that
indicates #' if the query or statement has completed, #' as returned by [dbHasCompleted()]. "has.completed" ) for (name in necessary_names) { eval(bquote( expect_true(.(name) %in% info_names) )) } }, # NULL ) DBItest/R/spec-sql-unquote-identifier.R0000644000176200001440000001533314157037563017461 0ustar liggesusers#' spec_sql_unquote_identifier #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_unquote_identifier <- list( unquote_identifier_formals = function() { # expect_equal(names(formals(dbUnquoteIdentifier)), c("conn", "x", "...")) }, #' @return unquote_identifier_return = function(con) { #' `dbUnquoteIdentifier()` returns a list of objects simple_in <- dbQuoteIdentifier(con, "simple") simple_out <- dbUnquoteIdentifier(con, simple_in) expect_is(simple_out, "list") }, # unquote_identifier_vectorized = function(ctx, con) { #' of the same length as the input. simple_in <- dbQuoteIdentifier(con, "simple") simple_out <- dbUnquoteIdentifier(con, simple_in) expect_equal(length(simple_out), 1L) letters_in <- dbQuoteIdentifier(con, letters) letters_out <- dbUnquoteIdentifier(con, letters_in) expect_equal(length(letters_out), length(letters_in)) #' For an empty character vector this function returns a length-0 object. empty <- character() empty_in <- dbQuoteIdentifier(con, empty) empty_out <- dbUnquoteIdentifier(con, empty_in) expect_equal(length(empty_out), 0) #' The names of the input argument are preserved in the output. unnamed_in <- dbQuoteIdentifier(con, letters) unnamed_out <- dbUnquoteIdentifier(con, unnamed_in) expect_null(names(unnamed_out)) named_in <- dbQuoteIdentifier(con, stats::setNames(LETTERS[1:3], letters[1:3])) named_out <- dbUnquoteIdentifier(con, named_in) expect_equal(names(named_out), letters[1:3]) #' When passing the first element of a returned object again to #' `dbUnquoteIdentifier()` as `x` #' argument, it is returned unchanged (but wrapped in a list). 
expect_identical(dbUnquoteIdentifier(con, simple_out[[1]]), simple_out) expect_identical(dbUnquoteIdentifier(con, letters_out[[1]]), letters_out[1]) #' Passing objects of class [Id] should also return them unchanged (but wrapped in a list). expect_identical(dbUnquoteIdentifier(con, Id(table = "simple")), list(Id(table = "simple"))) #' (For backends it may be most convenient to return [Id] objects #' to achieve this behavior, but this is not required.) }, #' #' @section Failure modes: unquote_identifier_error = function(ctx, con) { #' #' An error is raised if plain character vectors are passed as the `x` #' argument. expect_error(dbUnquoteIdentifier(con, NA_character_)) expect_error(dbUnquoteIdentifier(con, c("a", NA_character_))) expect_error(dbUnquoteIdentifier(con, character())) }, #' @section Specification: #' For any character vector of length one, quoting (with [dbQuoteIdentifier()]) #' then unquoting then quoting the first element is identical to just quoting. unquote_identifier_roundtrip = function(ctx, con) { simple_in <- dbQuoteIdentifier(con, "simple") simple_out <- dbUnquoteIdentifier(con, simple_in) simple_roundtrip <- dbQuoteIdentifier(con, simple_out[[1]]) expect_identical(simple_in, simple_roundtrip) }, # unquote_identifier_special = function(ctx, con) { #' This is also true for strings that #' contain special characters such as a space, with_space_in <- dbQuoteIdentifier(con, "with space") with_space_out <- dbUnquoteIdentifier(con, with_space_in) with_space_roundtrip <- dbQuoteIdentifier(con, with_space_out[[1]]) #' a dot, with_dot_in <- dbQuoteIdentifier(con, "with.dot") with_dot_out <- dbUnquoteIdentifier(con, with_dot_in) with_dot_roundtrip <- dbQuoteIdentifier(con, with_dot_out[[1]]) #' a comma, with_comma_in <- dbQuoteIdentifier(con, "with,comma") with_comma_out <- dbUnquoteIdentifier(con, with_comma_in) with_comma_roundtrip <- dbQuoteIdentifier(con, with_comma_out[[1]]) #' or quotes used to mark strings with_quote_in <- dbQuoteIdentifier(con, 
as.character(dbQuoteString(con, "a"))) with_quote_out <- dbUnquoteIdentifier(con, with_quote_in) with_quote_roundtrip <- dbQuoteIdentifier(con, with_quote_out[[1]]) #' or identifiers, quoted_with_space_in <- dbQuoteIdentifier(con, as.character(with_space_in)) quoted_with_space_out <- dbUnquoteIdentifier(con, quoted_with_space_in) quoted_with_space_roundtrip <- dbQuoteIdentifier(con, quoted_with_space_out[[1]]) quoted_with_dot_in <- dbQuoteIdentifier(con, as.character(with_dot_in)) quoted_with_dot_out <- dbUnquoteIdentifier(con, quoted_with_dot_in) quoted_with_dot_roundtrip <- dbQuoteIdentifier(con, quoted_with_dot_out[[1]]) quoted_with_comma_in <- dbQuoteIdentifier(con, as.character(with_comma_in)) quoted_with_comma_out <- dbUnquoteIdentifier(con, quoted_with_comma_in) quoted_with_comma_roundtrip <- dbQuoteIdentifier(con, quoted_with_comma_out[[1]]) quoted_with_quote_in <- dbQuoteIdentifier(con, as.character(with_quote_in)) quoted_with_quote_out <- dbUnquoteIdentifier(con, quoted_with_quote_in) quoted_with_quote_roundtrip <- dbQuoteIdentifier(con, quoted_with_quote_out[[1]]) #' if the database supports this. if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } expect_identical(with_space_in, with_space_roundtrip) expect_identical(with_dot_in, with_dot_roundtrip) expect_identical(with_comma_in, with_comma_roundtrip) expect_identical(with_quote_in, with_quote_roundtrip) expect_identical(quoted_with_space_in, quoted_with_space_roundtrip) expect_identical(quoted_with_dot_in, quoted_with_dot_roundtrip) expect_identical(quoted_with_comma_in, quoted_with_comma_roundtrip) expect_identical(quoted_with_quote_in, quoted_with_quote_roundtrip) }, #' #' Unquoting simple strings (consisting of only letters) wrapped with [SQL()] #' and then quoting via [dbQuoteIdentifier()] gives the same result as just #' quoting the string. 
unquote_identifier_simple = function(ctx, con) { simple_in <- "simple" simple_quoted <- dbQuoteIdentifier(con, simple_in) simple_out <- dbUnquoteIdentifier(con, SQL(simple_in)) simple_roundtrip <- dbQuoteIdentifier(con, simple_out[[1]]) expect_identical(simple_roundtrip, simple_quoted) }, #' Similarly, unquoting expressions of the form `SQL("schema.table")` #' and then quoting gives the same result as quoting the identifier #' constructed by `Id(schema = "schema", table = "table")`. unquote_identifier_table_schema = function(ctx, con) { schema_in <- "schema" table_in <- "table" simple_quoted <- dbQuoteIdentifier(con, Id(schema = schema_in, table = table_in)) simple_out <- dbUnquoteIdentifier(con, SQL(paste0(schema_in, ".", table_in))) simple_roundtrip <- dbQuoteIdentifier(con, simple_out[[1]]) expect_identical(simple_roundtrip, simple_quoted) }, # NULL ) DBItest/R/spec-driver-connect.R0000644000176200001440000000601714157037563015765 0ustar liggesusers#' spec_driver_connect #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_driver_connect <- list( connect_formals = function() { # expect_equal(names(formals(dbConnect)), c("drv", "...")) }, #' @return connect_can_connect = function(ctx) { con <- expect_visible(connect(ctx)) #' `dbConnect()` returns an S4 object that inherits from [DBIConnection-class]. expect_s4_class(con, "DBIConnection") dbDisconnect(con) #' This object is used to communicate with the database engine. }, # connect_format = function(con) { #' #' A [format()] method is defined for the connection object. desc <- format(con) #' It returns a string that consists of a single line of text. 
expect_is(desc, "character") expect_length(desc, 1) expect_false(grepl("\n", desc, fixed = TRUE)) }, #' @section Specification: #' DBI recommends using the following argument names for authentication #' parameters, with `NULL` default: #' - `user` for the user name (default: current user) #' - `password` for the password #' - `host` for the host name (default: local connection) #' - `port` for the port number (default: local connection) #' - `dbname` for the name of the database on the host, or the database file #' name #' #' The defaults should provide reasonable behavior, in particular a #' local connection for `host = NULL`. For some DBMS (e.g., PostgreSQL), #' this is different to a TCP/IP connection to `localhost`. #' #' In addition, DBI supports the `bigint` argument that governs how #' 64-bit integer data is returned. The following values are supported: connect_bigint_integer = function(ctx) { #' - `"integer"`: always return as `integer`, silently overflow con <- local_connection(ctx, bigint = "integer") res <- dbGetQuery(con, "SELECT 10000000000") expect_type(res[[1]], "integer") }, # connect_bigint_numeric = function(ctx) { #' - `"numeric"`: always return as `numeric`, silently round con <- local_connection(ctx, bigint = "numeric") res <- dbGetQuery(con, "SELECT 10000000000") expect_type(res[[1]], "double") expect_equal(res[[1]], 1e10) }, # connect_bigint_character = function(ctx) { #' - `"character"`: always return the decimal representation as `character` con <- local_connection(ctx, bigint = "character") res <- dbGetQuery(con, "SELECT 10000000000") expect_type(res[[1]], "character") expect_equal(res[[1]], "10000000000") }, # connect_bigint_integer64 = function(ctx) { #' - `"integer64"`: return as a data type that can be coerced using #' [as.integer()] (with warning on overflow), [as.numeric()] #' and [as.character()] con <- local_connection(ctx, bigint = "integer64") res <- dbGetQuery(con, "SELECT 10000000000") 
expect_warning(expect_true(is.na(as.integer(res[[1]])))) expect_equal(as.numeric(res[[1]]), 1e10) expect_equal(as.character(res[[1]]), "10000000000") }, # NULL ) DBItest/R/spec-meta-has-completed.R0000644000176200001440000000465114157037563016516 0ustar liggesusers#' spec_meta_has_completed #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_has_completed <- list( has_completed_formals = function() { # expect_equal(names(formals(dbHasCompleted)), c("res", "...")) }, #' @return #' `dbHasCompleted()` returns a logical scalar. has_completed_query = function(con) { #' For a query initiated by [dbSendQuery()] with non-empty result set, res <- local_result(dbSendQuery(con, trivial_query())) #' `dbHasCompleted()` returns `FALSE` initially expect_false(expect_visible(dbHasCompleted(res))) #' and `TRUE` after calling [dbFetch()] without limit. check_df(dbFetch(res)) expect_true(expect_visible(dbHasCompleted(res))) }, # has_completed_statement = function(con, table_name) { #' For a query initiated by [dbSendStatement()], res <- local_result(dbSendStatement(con, paste0("CREATE TABLE ", table_name, " (a integer)"))) #' `dbHasCompleted()` always returns `TRUE`. expect_true(expect_visible(dbHasCompleted(res))) }, #' #' @section Failure modes: has_completed_error = function(con) { res <- dbSendQuery(con, trivial_query()) dbClearResult(res) #' Attempting to query completion status for a result set cleared with #' [dbClearResult()] gives an error. expect_error(dbHasCompleted(res)) }, #' @section Specification: has_completed_query_spec = function(con) { #' The completion status for a query is only guaranteed to be set to #' `FALSE` after attempting to fetch past the end of the entire result. #' Therefore, for a query with an empty result set, res <- local_result(dbSendQuery(con, "SELECT * FROM (SELECT 1 as a) AS x WHERE (1 = 0)")) #' the initial return value is unspecified, #' but the result value is `TRUE` after trying to fetch only one row. 
check_df(dbFetch(res, 1)) expect_true(expect_visible(dbHasCompleted(res))) }, #' @section Specification: has_completed_query_spec_partial = function(con) { #' Similarly, for a query with a result set of length n, res <- local_result(dbSendQuery(con, trivial_query())) #' the return value is unspecified after fetching n rows, check_df(dbFetch(res, 1)) #' but the result value is `TRUE` after trying to fetch only one more #' row. check_df(dbFetch(res, 1)) expect_true(expect_visible(dbHasCompleted(res))) }, # NULL ) DBItest/R/spec-sql-write-table.R0000644000176200001440000007050114157037563016056 0ustar liggesusers#' spec_sql_write_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @importFrom lubridate with_tz spec_sql_write_table <- list( write_table_formals = function() { # expect_equal(names(formals(dbWriteTable)), c("conn", "name", "value", "...")) }, #' @return #' `dbWriteTable()` returns `TRUE`, invisibly. write_table_return = function(con, table_name) { expect_invisible_true(dbWriteTable(con, table_name, data.frame(a = 1L))) }, #' #' @section Failure modes: #' If the table exists, and both `append` and `overwrite` arguments are unset, write_table_overwrite = function(con, table_name) { test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) expect_error(dbWriteTable(con, table_name, data.frame(a = 2L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' or `append = TRUE` and the data frame with the new data has different #' column names, #' an error is raised; the remote table remains unchanged. 
write_table_append_incompatible = function(con, table_name) { test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) expect_error(dbWriteTable(con, table_name, data.frame(b = 2L), append = TRUE)) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' #' An error is raised when calling this method for a closed write_table_closed_connection = function(ctx, closed_con) { expect_error(dbWriteTable(closed_con, "test", data.frame(a = 1))) }, #' or invalid connection. write_table_invalid_connection = function(ctx, invalid_con) { expect_error(dbWriteTable(invalid_con, "test", data.frame(a = 1))) }, #' An error is also raised write_table_error = function(ctx, con, table_name) { test_in <- data.frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] expect_error(dbWriteTable(con, NA, test_in)) #' or if this results in a non-scalar. expect_error(dbWriteTable(con, c(table_name, table_name), test_in)) #' Invalid values for the additional arguments `row.names`, #' `overwrite`, `append`, `field.types`, and `temporary` #' (non-scalars, expect_error(dbWriteTable(con, table_name, test_in, row.names = letters)) expect_error(dbWriteTable(con, table_name, test_in, overwrite = c(TRUE, FALSE))) expect_error(dbWriteTable(con, table_name, test_in, append = c(TRUE, FALSE))) expect_error(dbWriteTable(con, table_name, test_in, temporary = c(TRUE, FALSE))) #' unsupported data types, expect_error(dbWriteTable(con, table_name, test_in, row.names = list(1L))) expect_error(dbWriteTable(con, table_name, test_in, overwrite = 1L)) expect_error(dbWriteTable(con, table_name, test_in, append = 1L)) expect_error(dbWriteTable(con, table_name, test_in, field.types = 1L)) expect_error(dbWriteTable(con, table_name, test_in, temporary = 1L)) #' `NA`, expect_error(dbWriteTable(con, table_name, test_in, overwrite = NA)) expect_error(dbWriteTable(con, table_name, test_in, append = NA)) expect_error(dbWriteTable(con, table_name, test_in, 
field.types = NA)) expect_error(dbWriteTable(con, table_name, test_in, temporary = NA)) #' incompatible values, expect_error(dbWriteTable(con, table_name, test_in, field.types = letters)) expect_error(dbWriteTable(con, table_name, test_in, field.types = c(b = "INTEGER"))) expect_error(dbWriteTable(con, table_name, test_in, overwrite = TRUE, append = TRUE)) expect_error(dbWriteTable(con, table_name, test_in, append = TRUE, field.types = c(a = "INTEGER"))) #' duplicate expect_error(dbWriteTable(con, table_name, test_in, field.types = c(a = "INTEGER", a = "INTEGER"))) #' or missing names, expect_error(dbWriteTable(con, table_name, test_in, field.types = c("INTEGER"))) #' incompatible columns) dbWriteTable(con, table_name, test_in) expect_error(dbWriteTable(con, table_name, data.frame(b = 2L, c = 3L), append = TRUE)) #' also raise an error. }, #' @section Additional arguments: #' The following arguments are not part of the `dbWriteTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `row.names` (default: `FALSE`) #' - `overwrite` (default: `FALSE`) #' - `append` (default: `FALSE`) #' - `field.types` (default: `NULL`) #' - `temporary` (default: `FALSE`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
#' @section Specification: #' The `name` argument is processed as follows, write_table_name = function(ctx, con) { #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- data.frame(a = 1) local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbWriteTable()` will do the quoting, dbWriteTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done write_table_name_quoted = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- data.frame(a = 1) local_remove_test_table(con, table_name) dbWriteTable(con, dbQuoteIdentifier(con, table_name), test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) } }, #' #' The `value` argument must be a data frame write_table_value_df = function(con, table_name) { test_in <- trivial_df() dbWriteTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' with a subset of the columns of the existing table if `append = TRUE`. 
write_table_value_subset = function(ctx, con, table_name) { test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbWriteTable(con, table_name, test_in[2], append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) test_in[c(1, 3)] <- NA_real_ expect_equal_df(test_out, test_in) }, #' The order of the columns does not matter with `append = TRUE`. write_table_value_shuffle = function(ctx, con, table_name) { test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbWriteTable(con, table_name, test_in[c(2, 3, 1)], append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, # write_table_value_shuffle_subset = function(ctx, con, table_name) { test_in <- trivial_df(4, letters[1:4]) dbCreateTable(con, table_name, test_in) dbWriteTable(con, table_name, test_in[c(4, 1, 3)], append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) test_in[2] <- NA_real_ expect_equal_df(test_out, test_in) }, #' #' If the `overwrite` argument is `TRUE`, an existing table of the same name #' will be overwritten. overwrite_table = function(ctx, con, table_name) { penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_error( dbWriteTable(con, table_name, penguins[1, ], overwrite = TRUE), NA ) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[1, ]) }, #' This argument doesn't change behavior if the table does not exist yet. overwrite_table_missing = function(ctx, con, table_name) { penguins_in <- get_penguins(ctx) expect_error( dbWriteTable(con, table_name, penguins_in[1, ], overwrite = TRUE), NA ) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins_in[1, ]) }, #' #' If the `append` argument is `TRUE`, the rows in an existing table are #' preserved, and the new data are appended. 
append_table = function(ctx, con, table_name) { penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_error(dbWriteTable(con, table_name, penguins[1, ], append = TRUE), NA) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, rbind(penguins, penguins[1, ])) }, #' If the table doesn't exist yet, it is created. append_table_new = function(ctx, con, table_name) { penguins <- get_penguins(ctx) expect_error(dbWriteTable(con, table_name, penguins[1, ], append = TRUE), NA) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[1, ]) }, #' #' If the `temporary` argument is `TRUE`, the table is not available in a #' second connection and is gone after reconnecting. temporary_table = function(ctx, con, table_name = "dbit08") { #' Not all backends support this argument. if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins, temporary = TRUE) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins) con2 <- local_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, # second stage temporary_table = function(ctx, con) { if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } table_name <- "dbit08" expect_error(dbReadTable(con, table_name)) }, #' A regular, non-temporary table is visible in a second connection, table_visible_in_other_connection = function(ctx, local_con) { penguins30 <- get_penguins(ctx) table_name <- "dbit09" dbWriteTable(local_con, table_name, penguins30) penguins_out <- check_df(dbReadTable(local_con, table_name)) expect_equal_df(penguins_out, penguins30) con2 <- local_connection(ctx) expect_equal_df(dbReadTable(con2, table_name), penguins30) }, # second stage table_visible_in_other_connection = function(ctx, con) { #' in a pre-existing connection, penguins30 <- get_penguins(ctx) table_name <- "dbit09" 
expect_equal_df(check_df(dbReadTable(con, table_name)), penguins30) }, # third stage table_visible_in_other_connection = function(ctx, local_con, table_name = "dbit09") { #' and after reconnecting to the database. penguins30 <- get_penguins(ctx) expect_equal_df(check_df(dbReadTable(local_con, table_name)), penguins30) }, #' #' SQL keywords can be used freely in table names, column names, and data. roundtrip_keywords = function(ctx, con) { tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in, name = "exists") }, #' Quotes, commas, spaces, and other special characters such as newlines and tabs, #' can also be used in the data, roundtrip_quotes = function(ctx, con, table_name) { tbl_in <- data.frame( as.character(dbQuoteString(con, "")), as.character(dbQuoteIdentifier(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb", stringsAsFactors = FALSE ) names(tbl_in) <- letters[seq_along(tbl_in)] test_table_roundtrip(con, tbl_in) }, #' and, if the database supports non-syntactic identifiers, #' also for table names roundtrip_quotes_table_names = function(ctx, con) { if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb" ) tbl_in <- trivial_df() for (table_name in table_names) { test_table_roundtrip_one(con, tbl_in, .add_na = "none") } }, #' and column names. 
roundtrip_quotes_column_names = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } column_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\nb", "a\\tb", "a\\rb", "a\\bb", "a\\zb" ) tbl_in <- trivial_df(length(column_names), column_names) test_table_roundtrip_one(con, tbl_in, .add_na = "none") }, #' #' The following data types must be supported at least, #' and be read identically with [dbReadTable()]: #' - integer roundtrip_integer = function(ctx, con) { tbl_in <- data.frame(a = c(1:5)) test_table_roundtrip(con, tbl_in) }, #' - numeric roundtrip_numeric = function(ctx, con) { tbl_in <- data.frame(a = c(seq(1, 3, by = 0.5))) test_table_roundtrip(con, tbl_in) #' (the behavior for `Inf` and `NaN` is not specified) }, #' - logical roundtrip_logical = function(ctx, con) { tbl_in <- data.frame(a = c(TRUE, FALSE, NA)) tbl_exp <- tbl_in tbl_exp$a <- ctx$tweaks$logical_return(tbl_exp$a) test_table_roundtrip(con, tbl_in, tbl_exp) }, #' - `NA` as NULL roundtrip_null = function(ctx, con) { tbl_in <- data.frame(a = NA) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { tbl_out$a <- as.logical(tbl_out$a) # Plain NA is of type logical tbl_out } ) }, #' - 64-bit values (using `"bigint"` as field type); the result can be roundtrip_64_bit_numeric = function(ctx, con) { tbl_in <- data.frame(a = c(-1e14, 1e15)) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { #' - converted to a numeric, which may lose precision, tbl_out$a <- as.numeric(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # roundtrip_64_bit_character = function(ctx, con) { tbl_in <- data.frame(a = c(-1e14, 1e15)) tbl_exp <- tbl_in tbl_exp$a <- format(tbl_exp$a, scientific = FALSE) test_table_roundtrip( 
con, tbl_in, tbl_exp, transform = function(tbl_out) { #' - converted a character vector, which gives the full decimal #' representation tbl_out$a <- as.character(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # roundtrip_64_bit_roundtrip = function(con, table_name) { tbl_in <- data.frame(a = c(-1e14, 1e15)) dbWriteTable(con, table_name, tbl_in, field.types = c(a = "BIGINT")) tbl_out <- dbReadTable(con, table_name) #' - written to another table and read again unchanged test_table_roundtrip(con, tbl_out, tbl_expected = tbl_out) }, #' - character (in both UTF-8 roundtrip_character = function(ctx, con) { tbl_in <- data.frame( id = seq_along(texts), a = c(texts), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, #' and native encodings), roundtrip_character_native = function(ctx, con) { tbl_in <- data.frame( a = c(enc2native(texts)), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, #' supporting empty strings roundtrip_character_empty = function(ctx, con) { tbl_in <- data.frame( a = c("", "a"), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, #' before and after a non-empty string roundtrip_character_empty_after = function(ctx, con) { tbl_in <- data.frame( a = c("a", ""), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, #' - factor (returned as character) roundtrip_factor = function(ctx, con) { tbl_in <- data.frame( a = factor(c(texts)) ) tbl_exp <- tbl_in tbl_exp$a <- as.character(tbl_exp$a) test_table_roundtrip(con, tbl_in, tbl_exp) }, #' - list of raw roundtrip_raw = function(ctx, con) { #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = I(list(as.raw(0:10)))) tbl_exp <- tbl_in tbl_exp$a <- blob::as_blob(unclass(tbl_in$a)) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, #' - objects of type [blob::blob] roundtrip_blob = 
function(ctx, con) { #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = blob::blob(as.raw(0:10))) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, #' - date roundtrip_date = function(ctx, con) { #' (if supported by the database; if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } #' returned as `Date`), tbl_in <- data.frame(a = as_numeric_date(c(Sys.Date() + 1:5))) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, #' also for dates prior to 1970 or 1900 or after 2038 roundtrip_date_extended = function(ctx, con) { if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } tbl_in <- data.frame(a = as_numeric_date(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" ))) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, #' - time roundtrip_time = function(ctx, con) { #' (if supported by the database; if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } tbl_in <- data.frame(a = hms::hms(minutes = 1:5)) tbl_in$b <- .difftime(as.numeric(tbl_in$a) / 60, "mins") tbl_exp <- tbl_in tbl_exp$a <- hms::as_hms(tbl_exp$a) tbl_exp$b <- hms::as_hms(tbl_exp$b) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' returned as objects that inherit from `difftime`) expect_s3_class(tbl_out$a, "difftime") expect_s3_class(tbl_out$b, "difftime") tbl_out$a <- hms::as_hms(tbl_out$a) tbl_out$b <- hms::as_hms(tbl_out$b) tbl_out } ) }, #' - timestamp roundtrip_timestamp = function(ctx, con) { #' (if supported by the database; if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } #' returned as `POSIXct` local <- 
round(Sys.time()) + c( 1, 60, 3600, 86400, 86400 * 90, 86400 * 180, 86400 * 270, 1e9, 5e9 ) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone), test_table_roundtrip( con, tbl_in, transform = function(out) { dates <- vapply(out, inherits, "POSIXt", FUN.VALUE = logical(1L)) tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' also for timestamps prior to 1970 or 1900 or after 2038 roundtrip_timestamp_extended = function(ctx, con) { if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } local <- as.POSIXct(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" )) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone) test_table_roundtrip( con, tbl_in, transform = function(out) { dates <- vapply(out, inherits, "POSIXt", FUN.VALUE = logical(1L)) tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' #' Mixing column types in the same table is supported. 
roundtrip_mixed = function(ctx, con) { data <- list("a", 1L, 1.5) data <- lapply(data, c, NA) expanded <- expand.grid(a = data, b = data, c = data) tbl_in_list <- lapply( seq_len(nrow(expanded)), function(i) { as.data.frame(lapply(expanded[i, ], unlist, recursive = FALSE)) } ) lapply(tbl_in_list, test_table_roundtrip, con = con) }, #' #' The `field.types` argument must be a named character vector with at most #' one entry for each column. #' It indicates the SQL data type to be used for a new column. roundtrip_field_types = function(ctx, con) { tbl_in <- data.frame(a = numeric(), b = character()) #' If a column is missed from `field.types`, the type is inferred #' from the input data with [dbDataType()]. tbl_exp <- data.frame(a = integer(), b = character()) test_table_roundtrip( con, tbl_in, tbl_exp, field.types = c(a = "INTEGER") ) tbl_in <- data.frame(a = numeric(), b = integer()) tbl_exp <- data.frame(a = integer(), b = numeric()) test_table_roundtrip( con, tbl_in, tbl_exp, field.types = c(b = "REAL", a = "INTEGER") ) }, #' #' The interpretation of [rownames] depends on the `row.names` argument, #' see [sqlRownamesToColumn()] for details: write_table_row_names_false = function(ctx, con) { #' - If `FALSE` or `NULL`, row names are ignored. 
for (row.names in list(FALSE, NULL)) { table_name <- random_table_name() local_remove_test_table(con, table_name) mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) } }, # write_table_row_names_true_exists = function(ctx, con, table_name) { #' - If `TRUE`, row names are converted to a column named "row_names", row.names <- TRUE mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) }, # write_table_row_names_true_missing = function(ctx, con, table_name) { #' even if the input data frame only has natural row names from 1 to `nrow(...)`. 
row.names <- TRUE penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = row.names) penguins_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("row_names" %in% names(penguins_out)) expect_true(all(rownames(penguins_in) %in% penguins_out$row_names)) expect_true(all(penguins_out$row_names %in% rownames(penguins_in))) expect_equal_df(penguins_out[names(penguins_out) != "row_names"], penguins_in) }, # write_table_row_names_na_exists = function(ctx, con, table_name) { #' - If `NA`, a column named "row_names" is created if the data has custom row names, row.names <- NA mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) }, # write_table_row_names_na_missing = function(ctx, con, table_name) { #' no extra column is created in the case of natural row names. 
row.names <- NA penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = row.names) penguins_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_equal_df(penguins_out, penguins_in) }, # write_table_row_names_string_exists = function(ctx, con, table_name) { row.names <- "make_model" #' - If a string, this specifies the name of the column in the remote table #' that contains the row names, mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("make_model" %in% names(mtcars_out)) expect_true(all(mtcars_out$make_model %in% rownames(mtcars_in))) expect_true(all(rownames(mtcars_in) %in% mtcars_out$make_model)) expect_equal_df(mtcars_out[names(mtcars_out) != "make_model"], unrowname(mtcars_in)) }, # write_table_row_names_string_missing = function(ctx, con, table_name) { row.names <- "seq" #' even if the input data frame only has natural row names. penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = row.names) penguins_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("seq" %in% names(penguins_out)) expect_true(all(penguins_out$seq %in% rownames(penguins_in))) expect_true(all(rownames(penguins_in) %in% penguins_out$seq)) expect_equal_df(penguins_out[names(penguins_out) != "seq"], penguins_in) }, # write_table_row_names_default = function(ctx, con, table_name) { #' #' The default is `row.names = FALSE`. mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) }, # NULL ) test_table_roundtrip <- function(...) 
{ test_table_roundtrip_one(..., .add_na = "none") test_table_roundtrip_one(..., .add_na = "above") test_table_roundtrip_one(..., .add_na = "below") } test_table_roundtrip_one <- function(con, tbl_in, tbl_expected = tbl_in, transform = identity, name = NULL, field.types = NULL, use_append = FALSE, .add_na = "none") { force(tbl_expected) if (.add_na == "above") { tbl_in <- add_na_above(tbl_in) tbl_expected <- add_na_above(tbl_expected) } else if (.add_na == "below") { tbl_in <- add_na_below(tbl_in) tbl_expected <- add_na_below(tbl_expected) } if (is.null(name)) { name <- random_table_name() } local_remove_test_table(con, name = name) if (use_append) { dbCreateTable(con, name, field.types %||% tbl_in) dbAppendTable(con, name, tbl_in) } else { dbWriteTable(con, name, tbl_in, field.types = field.types) } tbl_read <- check_df(dbReadTable(con, name, check.names = FALSE)) tbl_out <- transform(tbl_read) expect_equal_df(tbl_out, tbl_expected) } add_na_above <- function(tbl) { idx <- c(NA, seq_len(nrow(tbl))) tbl <- tbl[idx, , drop = FALSE] unrowname(tbl) } add_na_below <- function(tbl) { idx <- c(seq_len(nrow(tbl)), NA) tbl <- tbl[idx, , drop = FALSE] unrowname(tbl) } DBItest/R/spec-driver-constructor.R0000644000176200001440000000261314157037563016717 0ustar liggesusers#' spec_driver_constructor #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @section Construction of the DBIDriver object: spec_driver_constructor <- list( constructor = function(ctx) { pkg_name <- package_name(ctx) #' The backend must support creation of an instance of its [DBIDriver-class] #' subclass #' with a \dfn{constructor function}. #' By default, its name is the package name without the leading \sQuote{R} #' (if it exists), e.g., `SQLite` for the \pkg{RSQLite} package. default_constructor_name <- gsub("^R", "", pkg_name) #' However, backend authors may choose a different name. 
constructor_name <- ctx$tweaks$constructor_name %||% default_constructor_name #' The constructor must be exported, and pkg_env <- getNamespace(pkg_name) eval(bquote( expect_true(.(constructor_name) %in% getNamespaceExports(pkg_env)) )) #' it must be a function eval(bquote( expect_true(exists(.(constructor_name), mode = "function", pkg_env)) )) constructor <- get(constructor_name, mode = "function", pkg_env) #' that is callable without arguments. expect_all_args_have_default_values(constructor) #' DBI recommends to define a constructor with an empty argument list. if (!isTRUE(ctx$tweaks$constructor_relax_args)) { expect_arglist_is_empty(constructor) } }, # NULL ) DBItest/R/spec-sql-create-table.R0000644000176200001440000002124314157037563016166 0ustar liggesusers#' spec_sql_create_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_create_table <- list( create_table_formals = function() { # expect_equal(names(formals(dbCreateTable)), c("conn", "name", "fields", "...", "row.names", "temporary")) }, #' @return #' `dbCreateTable()` returns `TRUE`, invisibly. create_table_return = function(con, table_name) { expect_invisible_true(dbCreateTable(con, table_name, trivial_df())) }, #' #' @section Failure modes: #' If the table exists, an error is raised; the remote table remains unchanged. create_table_overwrite = function(con, table_name) { test_in <- trivial_df() dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) expect_error(dbCreateTable(con, table_name, data.frame(b = 1L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' #' An error is raised when calling this method for a closed create_table_closed_connection = function(ctx, closed_con) { expect_error(dbCreateTable(closed_con, "test", data.frame(a = 1))) }, #' or invalid connection. 
create_table_invalid_connection = function(ctx, invalid_con) { expect_error(dbCreateTable(invalid_con, "test", data.frame(a = 1))) }, #' An error is also raised create_table_error = function(ctx, con, table_name) { test_in <- data.frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] expect_error(dbCreateTable(con, NA, test_in)) #' or if this results in a non-scalar. expect_error(dbCreateTable(con, c(table_name, table_name), test_in)) #' Invalid values for the `row.names` and `temporary` arguments #' (non-scalars, expect_error(dbCreateTable(con, table_name, test_in, row.names = letters)) expect_error(dbCreateTable(con, table_name, test_in, temporary = c(TRUE, FALSE))) #' unsupported data types, expect_error(dbCreateTable(con, table_name, test_in, row.names = list(1L))) expect_error(dbCreateTable(con, table_name, fields = 1L)) expect_error(dbCreateTable(con, table_name, test_in, temporary = 1L)) #' `NA`, expect_error(dbCreateTable(con, table_name, test_in, row.names = NA)) expect_error(dbCreateTable(con, table_name, fields = NA)) expect_error(dbCreateTable(con, table_name, test_in, temporary = NA)) #' incompatible values, expect_error(dbCreateTable(con, table_name, test_in, fields = letters)) #' duplicate names) expect_error(dbCreateTable(con, table_name, fields = c(a = "INTEGER", a = "INTEGER"))) #' also raise an error. }, #' @section Additional arguments: #' The following arguments are not part of the `dbCreateTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `temporary` (default: `FALSE`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
#' @section Specification: #' The `name` argument is processed as follows, create_table_name = function(ctx, con) { #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbCreateTable()` will do the quoting, dbCreateTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in[0, , drop = FALSE]) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done create_table_name_quoted = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.2") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) dbCreateTable(con, dbQuoteIdentifier(con, table_name), test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in[0, , drop = FALSE]) } }, #' #' If the `temporary` argument is `TRUE`, the table is not available in a #' second connection and is gone after reconnecting. create_temporary_table = function(ctx, con, table_name = "dbit03") { #' Not all backends support this argument. 
if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } penguins <- get_penguins(ctx) dbCreateTable(con, table_name, penguins, temporary = TRUE) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[0, , drop = FALSE]) con2 <- local_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, # second stage create_temporary_table = function(con) { table_name <- "dbit03" expect_error(dbReadTable(con, table_name)) }, #' A regular, non-temporary table is visible in a second connection, create_table_visible_in_other_connection = function(ctx, local_con) { penguins <- get_penguins(ctx) table_name <- "dbit04" dbCreateTable(local_con, table_name, penguins) penguins_out <- check_df(dbReadTable(local_con, table_name)) expect_equal_df(penguins_out, penguins[0, , drop = FALSE]) con2 <- local_connection(ctx) expect_equal_df(dbReadTable(con2, table_name), penguins[0, , drop = FALSE]) }, # second stage create_table_visible_in_other_connection = function(ctx, con) { penguins <- get_penguins(ctx) table_name <- "dbit04" #' in a pre-existing connection, expect_equal_df(check_df(dbReadTable(con, table_name)), penguins[0, , drop = FALSE]) }, # third stage create_table_visible_in_other_connection = function(ctx, local_con, table_name = "dbit04") { penguins <- get_penguins(ctx) #' and after reconnecting to the database. expect_equal_df(check_df(dbReadTable(local_con, table_name)), penguins[0, , drop = FALSE]) }, #' #' SQL keywords can be used freely in table names, column names, and data. create_roundtrip_keywords = function(ctx, con) { tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in, name = "exists") }, #' Quotes, commas, and spaces can also be used for table names and column names, #' if the database supports non-syntactic identifiers. 
create_roundtrip_quotes = function(ctx, con) { if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "," ) for (table_name in table_names) { tbl_in <- trivial_df(4, letters[1:4]) names(tbl_in) <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "," ) test_table_roundtrip(con, tbl_in) } }, #' #' The `row.names` argument must be missing create_table_row_names_default = function(ctx, con, table_name) { mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)[0, , drop = FALSE]) }, #' or `NULL`, the default value. create_table_row_names_null = function(ctx, con, table_name) { mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in, row.names = NULL) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = NULL)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)[0, , drop = FALSE]) }, # create_table_row_names_non_null = function(ctx, con, table_name) { #' All other values for the `row.names` argument mtcars_in <- datasets::mtcars #' (in particular `TRUE`, expect_error(dbCreateTable(con, table_name, mtcars_in, row.names = TRUE)) #' `NA`, expect_error(dbCreateTable(con, table_name, mtcars_in, row.names = NA)) #' and a string) expect_error(dbCreateTable(con, table_name, mtcars_in, row.names = "make_model")) #' raise an error. 
}, # NULL ) DBItest/R/test-transaction.R0000644000176200001440000000063614157034343015407 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_transaction()]: #' Test transaction functions NULL #' Test transaction functions #' #' @inheritParams test_all #' @include test-meta.R #' @family tests #' @export test_transaction <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Transactions" run_tests(ctx, spec_transaction, skip, run_only, test_suite) } DBItest/R/spec-transaction-begin-commit-rollback.R0000644000176200001440000001234114157037563021524 0ustar liggesusers#' spec_transaction_begin_commit_rollback #' @family transaction specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_transaction_begin_commit_rollback <- list( begin_formals = function() { # expect_equal(names(formals(dbBegin)), c("conn", "...")) }, # commit_formals = function() { # expect_equal(names(formals(dbCommit)), c("conn", "...")) }, # rollback_formals = function() { # expect_equal(names(formals(dbRollback)), c("conn", "...")) }, #' @return #' `dbBegin()`, `dbCommit()` and `dbRollback()` return `TRUE`, invisibly. begin_commit_return_value = function(con) { expect_invisible_true(dbBegin(con)) on.exit({ dbRollback(con) }) expect_invisible_true(dbCommit(con)) on.exit(NULL) }, # begin_rollback_return_value = function(con) { expect_invisible_true(dbBegin(con)) expect_invisible_true(dbRollback(con)) }, #' #' @section Failure modes: #' The implementations are expected to raise an error in case of failure, #' but this is not tested. begin_commit_closed = function(ctx, closed_con) { #' In any way, all generics throw an error with a closed expect_error(dbBegin(closed_con)) expect_error(dbCommit(closed_con)) expect_error(dbRollback(closed_con)) }, # begin_commit_invalid = function(ctx, invalid_con) { #' or invalid connection. 
expect_error(dbBegin(invalid_con)) expect_error(dbCommit(invalid_con)) expect_error(dbRollback(invalid_con)) }, # commit_without_begin = function(con) { #' In addition, a call to `dbCommit()` expect_error(dbCommit(con)) }, # rollback_without_begin = function(con) { #' or `dbRollback()` #' without a prior call to `dbBegin()` raises an error. expect_error(dbRollback(con)) }, # begin_begin = function(con) { #' Nested transactions are not supported by DBI, #' an attempt to call `dbBegin()` twice dbBegin(con) on.exit({ dbRollback(con) }) #' yields an error. expect_error(dbBegin(con)) dbCommit(con) on.exit(NULL) }, #' @section Specification: #' Actual support for transactions may vary between backends. begin_commit = function(con) { #' A transaction is initiated by a call to `dbBegin()` dbBegin(con) #' and committed by a call to `dbCommit()`. success <- FALSE expect_error( { dbCommit(con) success <- TRUE }, NA ) if (!success) dbRollback(con) }, #' Data written in a transaction must persist after the transaction is committed. begin_write_commit = function(con) { #' For example, a record that is missing when the transaction is started table_name <- "dbit00" dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) dbBegin(con) on.exit({ dbRollback(con) }) #' but is created during the transaction dbExecute(con, paste0("INSERT INTO ", table_name, " (a) VALUES (1)")) #' must exist expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) #' both during dbCommit(con) on.exit(NULL) #' and after the transaction, expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) }, # second stage begin_write_commit = function(con, table_name = "dbit00") { #' and also in a new connection. expect_true(dbExistsTable(con, table_name)) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) }, # begin_rollback = function(con) { #' #' A transaction dbBegin(con) #' can also be aborted with `dbRollback()`. 
expect_error(dbRollback(con), NA) }, #' All data written in such a transaction must be removed after the #' transaction is rolled back. begin_write_rollback = function(con, table_name) { #' For example, a record that is missing when the transaction is started dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) dbBegin(con) #' but is created during the transaction dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE) #' must not exist anymore after the rollback. dbRollback(con) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0L)) }, # begin_write_disconnect = function(local_con) { table_name <- "dbit01" #' #' Disconnection from a connection with an open transaction dbWriteTable(local_con, table_name, data.frame(a = 0L), overwrite = TRUE) dbBegin(local_con) dbWriteTable(local_con, table_name, data.frame(a = 1L), append = TRUE) }, # begin_write_disconnect = function(local_con, table_name = "dbit01") { #' effectively rolls back the transaction. #' All data written in such a transaction must be removed after the #' transaction is rolled back. expect_equal(check_df(dbReadTable(local_con, table_name)), data.frame(a = 0L)) }, #' #' The behavior is not specified if other arguments are passed to these #' functions. In particular, \pkg{RSQLite} issues named transactions #' with support for nesting #' if the `name` argument is set. #' #' The transaction isolation level is not specified by DBI. 
NULL ) DBItest/R/test-result.R0000644000176200001440000000061114157034343014371 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_result()]: #' Test the "Result" class NULL #' Test the "Result" class #' #' @inheritParams test_all #' @include test-connection.R #' @family tests #' @export test_result <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Result" run_tests(ctx, spec_result, skip, run_only, test_suite) } DBItest/R/DBItest.R0000644000176200001440000000057014157034343013400 0ustar liggesusers#' @details #' The two most important functions are [make_context()] and #' [test_all()]. The former tells the package how to connect to your #' DBI backend, the latter executes all tests of the test suite. More #' fine-grained test functions (all with prefix `test_`) are available. #' #' See the package's vignette for more details. #' #' @author Kirill Müller "_PACKAGE" DBItest/NEWS.md0000644000176200001440000005037414157040710012656 0ustar liggesusers # DBItest 1.7.2 (2021-12-17) ## Features - `tweaks()` gains `dbitest_version` argument to support targeting a specific version of the DBItest package. The default is 1.7.1 (#236). - Reuse database connection for most tests (#245). - New `roundtrip_date_extended`, `roundtrip_timestamp_extended`, `append_roundtrip_date_extended` and `append_roundtrip_timestamp_extended` test dates between 1800 and 2999 (#148, #249). - New `quote_literal_empty` test (#248). - New `bind_character_escape` test for binding special characters (#242). - New `bind_time_minutes_integer` test for integer durations. ## Bug fixes - All column names are specified using lowercase on input, for compatibility with Redshift (#234). - `column_info_consistent` no longer tests mangling of column names (#181). - `spec_sql_append_table` test: Remove bad argument. ## Documentation - Improve documentation: list `spec_` objects in pkgdown help index, add cross references (#128). 
- Add specification for `value` argument to `DBI::dbWriteTable()` (#235). ## Internal - Replace internal `with_result()`, `with_remove_test_tables()` and `with_rollback_on_error()` for better error traces (#184, #250, #251, #253). - Use `palmerpenguins::penguins` instead of `iris` (#241). - Fix MySQL ODBC test on GitHub Actions (#237). - Improve testthat 3e compatibility: remove `testthat::expect_is()` and `testthat::expect_that()` from tests (#231, @michaelquinn32). - Decompose query used for testing `dbBind()`. # DBItest 1.7.1 (2021-07-30) ## Features - Many tests now emit simpler stack traces, because the connection is opened by the test driver and not by the test itself (#187). Reduce usage of `with_remove_test_table()` for better stack traces on error (#196). Remove `with_*connection()` (#193). - `test_some()` shows DBI code via dblog (#217) if `dblog = TRUE` (#226). - New `"bind_date_integer"`, `"bind_time_seconds"` and `"bind_time_hours"` tests (#218). - New `create_table_as` tweak (#131). - `"roundtrip_time"` and `"append_roundtrip_time"` tests now also test values of class `"difftime"` with units other than `"secs"` (#199). - All tables created by the tests have the `"dbit"` prefix. Almost all tests now use random table names to avoid collisions and unrelated test failures (#197). - `"roundtrip_timestamp"` tests now accept a time zone set by the database backend (#178, #198). - Support more than one class of each type in DBI backend packages. ## Bug fixes - Fix input dataset in `"overwrite_table_missing"` test (#210, @martinstuder). - Use original test name to decide if a test is skipped (#225). - Fix reexport test: skip if package is not installed, remove checks for deprecated functions and functions not reexported (#203). ## Internal - Requires DBI 1.1.1. - Test odbc as part of the backend tests (#228). - Dynamic build matrix for backends (#221). - Compatibility with testthat 3.0.0 (#207). - Switch to GitHub Actions (#201). 
# DBItest 1.7.0 (2019-12-16) ## Specifications - Specify tests for `dbGetInfo()`. - Specify `immediate` argument (r-dbi/DBI#268). - Specify `dbCreateTable()` and `dbAppendTable()` (#169). - New `unquote_identifier_table_schema` test: Identifiers of the form `table.schema` can be processed with `dbUnquoteIdentifier()`. - Fix `has_completed_statement` test (#176). ## Testing infrastructure - Document how to run tests externally and how to debug tests (#165). - `test_*()` gain new `run_only = NULL` argument that allow restricting the tests to be run with a positive match. `test_some()` uses `run_only` instead of constructing a regular expression with negative lookahead. This helps troubleshooting a single test with `testthat::set_reporter(DebugReporter$new())` . - `make_context()` gains `default_skip` argument and uses the `DBIConnector` class. - Support `NULL` default value in driver constructor (#171). ## Internal - Fulfill CII badge requirements (#179, @TSchiefer). - Use debugme. - Require R 3.2. - Avoid subsetting vectors out of bounds, for consistency with vctrs. # DBItest 1.6.0 (2018-05-03) ## New checks - Now checking that `Id()` is reexported. - Support `temporary` argument in `dbRemoveTable()` (default: `FALSE`) (r-dbi/DBI#141). - Added specification for the behavior in case of duplicate column names (#137). - The `bigint` argument to `dbConnect()` is now specified. Accepts `"integer64"`, `"integer"`, `"numeric"` and `"character"`, large integers are returned as values of that type (#133). - Add specification for partially filled `field.types` argument. - Specify `dbRemoveTable(fail_if_missing = FALSE)` (r-dbi/DBI#197). - Add specification for `dbColumnInfo()` (r-dbi/DBI#75). - Add specification for `dbListFields()` (r-dbi/DBI#75). - Test that named parameters are actually matched by name in `dbBind()`, by shuffling them (#138). - Explicitly specify default `row.names = FALSE` for `dbReadTable()` and `dbWriteTable()` (#139). 
- Add specification for writing 64-bit values, backends must support roundtripping values returned from the database (#146). - Add specification for the `params` argument to `dbGetQuery()`, `dbSendQuery()`, `dbExecute()` and `dbSendStatement()` (#159). - Add test for `dbQuoteIdentifier()`: "The names of the input argument are preserved in the output" (r-lib/DBI#173). - Blob tests now also read and write zero bytes (\x00). - Add string encoded in Latin-1 to the character tests. - Added test for `dbIsValid()` on stale connections. ## Removed checks - Don't test selecting untyped `NULL` anymore. - Full interface compliance doesn't require a method for `dbGetInfo(DBIDriver)` for now. - Remove `"cannot_forget_disconnect"` test that fails on R-devel (#150). - Methods without `db` prefix are not checked for ellipsis in the signature anymore. - Don't specify `Inf` and `NaN` for lack of consistent support across DBMS (#142). ## Updated/corrected checks - Fix query that is supposed to generate a syntax error. - Fix typo (#147, @jonmcalder). - Implement `POSIXlt` bind test correctly. - Improve error detection for `dbBind()`. - Redesign tests for `dbBind()`, now queries of the form `SELECT CASE WHEN (? = ?) AND (? IS NULL) THEN 1.5 ELSE 2.5` are issued. The original tests were inappropriate for RMariaDB, because an untyped placeholder is returned as a blob. - Transaction tests now use `dbWriteTable()` instead of `dbCreateTable()`, because some DBMS don't support transactions for DML. - Fix timestamp tests for RMariaDB. - Fix string constants. - The `"roundtrip_timestamp"` test now correctly handles timezone information. The output timezone is ignored. - Clear result in `spec_meta_get_info_result` (#143). - Use named argument for `n` in `dbGetQuery()` call. - Minor fixes. ## Tweaks - New tweak `blob_cast` allows specifying a conversion function to the BLOB data type. - New `is_null_check` tweak that allows specifying a function that is used when checking values for `NULL`. 
Required for RPostgres. - New `list_temporary_tables` tweak that can be enabled independently of `temporary_tables` to indicate that the DBMS does not support listing temporary tables. ## Infrastructure - Allow running only a subset of tests in `test_all()` by specifying an environment variable. - `test_all()` and `test_some()` return `NULL` invisibly. ## Internals - Compatibility code if `DBI::dbQuoteLiteral()` is unavailable. - New `trivial_query()` replaces many hard-coded queries and uses non-integer values for better compatibility with RMariaDB. - Convert factor to character for iris data (#141). # DBItest 1.5-2 (2018-01-26) - Fix test that fails with "noLD". - Fix NOTEs on R-devel. # DBItest 1.5-1 (2017-12-10) - Remove `"cannot_forget_disconnect"` test that fails on R-devel (#150). # DBItest 1.5 (2017-06-18) Finalize specification. Most tests now come with a corresponding prose, only those where the behavior is not finally decided don't have a prose version yet (#88). New tests --------- - Test behavior of methods in presence of placeholders (#120). - Test column name mismatch behavior for appending tables (#93). - Test that `dbBind()` against factor works but raises a warning (#91). - Test roundtrip of alternating empty and non-empty strings (#42). - Test multiple columns of different types in one statement or table (#35). - Test `field.types` argument to `dbWriteTable()` (#12). - Added tests for invalid or closed connection argument to all methods that expect a connection as first argument (#117). - Enabled test that tests a missing `dbDisconnect()`. - Add test for unambiguous escaping of identifiers (rstats-db/RSQLite#123). - Reenable tests for visibility (#89). - Fix and specify 64-bit roundtrip test. - 64-bit integers only need to be coercible to `numeric` and `character` (#74). - Added roundtrip test for time values (#14). - Added tweaks for handling date, time, timestamp, ... (#53, #76). 
- Test that `dbFetch()` on update-only query returns warning (#66). Adapted tests ------------- - `NULL` is a valid value for the `row.names` argument, same as `FALSE`. - A column named `row_names` receives no special handling (#54). - A warning (not an error anymore) is expected when calling `dbDisconnect()` on a closed or invalid connection. - `row.names = FALSE` is now the default for methods that read or write tables. - Add `NA` to beginning and end of columns in table roundtrip tests (#24). - Stricter tests for confusion of named and unnamed SQL parameters and placeholders (#107). - Also check names of all returned data frames. - The return value for all calls to `dbGetQuery()`, `dbFetch()`, and `dbReadTable()` is now checked for consistency (all columns have the same length, length matches number of rows) (#126). - Removed stress tests that start a new session. - Allow `hms` (or other subclasses of `difftime`) to be returned as time class (#135, @jimhester). - Test that dates are of type `numeric` (#99, @jimhester). - Replace `POSIXlt` by `POSIXct` (#100, @jimhester). - Use `"PST8PDT"` instead of `"PST"` as time zone (#110, @thrasibule). - Added tests for support of `blob` objects (input and output), but backends are not required to return `blob` objects (#98). - The `logical_return`, `date_typed` and `timestamp_typed` tweaks are respected by the bind tests. - Fixed tests involving time comparison; now uses UTC timezone and compares against a `difftime`. - Tests for roundtrip of character values now includes tabs, in addition to many other special characters (#85). - Make sure at least one table exists in the `dbListTables()` test. - Fix roundtrip tests for raw columns: now expecting `NULL` and not `NA` entries for SQL NULL values. - Fix `expect_equal_df()` for list columns. - Testing that a warning is given if the user forgets to call `dbDisconnect()` or `dbClearResult()` (#103). - Numeric roundtrip accepts conversion of `NaN` to `NA` (#79). 
Internal -------- - Fix R CMD check errors. - Internal consistency checks (#114). - Skip patterns that don't match any of the tests now raise a warning (#84). - New `test_some()` to test individual tests (#136). - Use desc instead of devtools (#40). - All unexpected warnings are now reported as test failures (#113). - `DBItest_tweaks` class gains a `$` method, accessing an undefined tweak now raises an error. - The arguments of the `tweaks()` function now have default values that further describe their intended usage. - New `with_closed_connection(ctx = ctx, )`, `with_invalid_connection(ctx = ctx, )`, `with_result()` and `with_remove_test_table()` helpers, and `expect_visible()`, `expect_inbisible_true()`, and `expect_equal_df()` expectations for more concise tests. # DBItest 1.4 (2016-12-02) ## DBI specification - Use markdown in documentation. - Description of parametrized queries and statements (#88). - New hidden `DBIspec-wip` page for work-in-progress documentation. - Get rid of "Format" and "Usage" sections, and aliases, in the specs. ## Tests - Not testing for presence of `max.connections` element in `dbGetInfo(Driver)` (rstats-db/DBI#56). - Test multi-row binding for queries and statements (#96). - New `ellipsis` check that verifies that all implemented DBI methods contain `...` in their formals. This excludes `show()` and all methods defined in this or other packages. - Refactored `bind_` tests to use the new `parameter_pattern` tweak (#95). - Rough draft of transaction tests (#36). - New `fetch_zero_rows` test, split from `fetch_premature_close`. - The "compliance" test tests that the backend package exports exactly one subclass of each DBI virtual class. - Document and enhance test for `dbDataType("DBIDriver", "ANY")` (#88). - Minor corrections for "bind" tests. ## Internal - Isolate stress tests from main test suite (#92). - Refactor test specification in smaller modules, isolated from actual test execution (#81). 
This breaks the documentation of the tests, which will be substituted by a DBI specification in prose. - Align description of binding with code. - Refactor tests for `dbBind()`, test is run by `BindTester` class, and behavior is specified by members and by instances of the new `BindTesterExtra` class. - The `skip` argument to the `test_()` functions is again evaluated with `perl = TRUE` to support negative lookaheads (#33). - Use `dbSendStatement()` and `dbExecute()` where appropriate. - Avoid empty subsections in Rd documentation to satisfy `R CMD check` (#81). # DBItest 1.3 (2016-07-07) Bug fixes --------- - Fix `read_table` test when the backend actually returns the data in a different order. New tests --------- - Test `dbDataType()` on connections (#69, #75, @imanuelcostigan). - Check returned strings for UTF-8 encoding (#72). - Repeated `dbBind()` + `dbFetch()` on the same result set (#51). Features -------- - `tweaks()` gains an `...` as first argument to support future/deprecated tweaks (with a warning), and also to avoid unnamed arguments (#83). - `testthat` now shows a more accurate location for the source of errors, failures, and skips (#78). - Aggregate skipped tests, only one `skip()` call per test function. - Indicate that some tests are optional in documentation (#15). Internal -------- - New `constructor_relax_args` tweak, currently not queried. - The `ctx` argument is now explicit in the test functions. - Change underscores to dashes in file names. - Remove `testthat` compatibility hack. - New `all_have_utf8_or_ascii_encoding()` which vectorizes `has_utf8_or_ascii_encoding()`. - Test on AppVeyor (#73). - Work around regression in R 3.3.0 (fix scheduled for R 3.3.1) which affected stress tests. # DBItest 1.2 (2016-05-21) - Infrastructure - Support names for contexts (@hoesler, #67). - The `skip` argument to the test functions is now treated as a Perl regular expression to allow negative lookahead. 
Use `skip = "(?!test_regex).*"` to choose a single test to run (#33). - Added encoding arguments to non-ASCII string constants (#60, @hoesler). - Improve tests - `simultaneous_connections` test always closes all connections on exit (@hoesler, #68). - More generic compliance check (@hoesler, #61). - Update documentation to reflect test condition (@imanuelcostigan, #70). - `testthat` dependency - Import all of `testthat` to avoid `R CMD check` warnings. - Compatibility with dev version of `testthat` (#62). - Improve Travis builds - Use container-based builds on Travis. - Install `RPostgres` and `RMySQL` from `rstats-db`. - Install `DBI` and `testthat` from GitHub. Version 1.1 (2016-02-12) === - New feature: tweaks - New argument `tweaks` to `make_context()` (#49). - New `tweaks()`, essentially constructs a named list of tweaks but with predefined and documented argument names. - `constructor_name`, respected by the `constructor.*` tests. - `strict_identifier`, if `TRUE` all identifier must be syntactic names even if quoted. The quoting test is now split, and a part is ignored conditional to this tweak. The `roundtrip_quotes` tests also respects this tweak. - `omit_blob_tests` for DBMS that don't have a BLOB data type. - `current_needs_parens` -- some SQL dialects (e.g., BigQuery) require parentheses for the functions `current_date`, `current_time` and `current_timestamp`. - `union`, for specifying a nonstandard way of combining queries. All union queries now name each column in each subquery (required for `bigrquery`). - New tests - `dbGetInfo(Result)` (rstats-db/DBI#55). - `dbListFields()` (#26). - New `package_name` test in `test_getting_started()`. - Improved tests - Stress test now installs package in temporary library (before loading `DBI`) using `R CMD INSTALL` before loading DBI (rstats-db/RSQLite#128, #48). - Row count is now tested for equality but not identity, so that backends can return a numeric value > 2^31 at their discretion. 
- Call `dbRemoveTable()` instead of issuing `DROP` requests, the latter might be unsupported. - Use subqueries in queries that use `WHERE`. - Test that `dbClearResult()` on a closed result set raises a warning. - Expect a warning instead of an error for double disconnect (#50). - Move connection test that requires `dbFetch()` to `test_result()`. - Split `can_connect_and_disconnect` test. - Expect `DBI` to be in `Imports`, not in `Depends`. - Removed tests - Remove test for `dbGetException()` (rstats-db/DBI#51). - Bug fixes - Fix broken tests for quoting. - Self-testing - Test `RPostgres`, `RMySQL`, `RSQLite` and `RKazam` as part of the Travis-CI tests (#52). - Travis CI now installs rstats-db/DBI, updated namespace imports (`dbiCheckCompliance()`, `dbListResults()`). - Use fork of `testthat`. - Utilities - Return test results as named array of logical. Requires hadley/testthat#360, gracefully degrades with the CRAN version. - Internal - Refactored the `get_info_()` tests to use a vector of names. 
- Use versioned dependency for DBI - Use unqualified calls to `dbBind()` again Version 1.0 (2015-12-17) === - CRAN release - Eliminate errors on win-builder - Satisfy R CMD check - Use LGPL-2 license - Add RStudio as copyright holder - Move `devtools` package from "Imports" to "Suggests" Version 0.3 (2015-11-15) === - Feature-complete, ready for review - Tests from the proposal - Add missing methods to compliance check - Add simple read-only test (#27) - Add stress tests for repeated load/unload (with and without connecting) in new R session (#2), - Migrate all tests from existing backends (#28) - Refactor `data_` tests to use a worker function `test_select()` - Test tables with `NA` values above and below the non-`NA` value in `data_` tests - Test return values and error conditions for `dbBind()` and `dbClearResult()` (#31) - Test vectorization of `dbQuoteString()` and `dbQuoteIdentifier()` (#18) - Test that dates have `integer` as underlying data type (#9) - Roundtrip tests sort output table to be sure (#32) - Test `NA` to `NULL` conversion in `dbQuoteString()`, and false friends (#23) - Enhance test for `dbQuoteIdentifier()` (#30) - Style - Avoid using `data.frame()` for date and time columns (#10) - Use `expect_identical()` instead of `expect_equal()` in many places (#13) - Catch all errors in `on.exit()` handlers via `expect_error()` (#20). 
- Combine "meta" tests into new `test_meta()` (#37) - Documentation - New "test" vignette (#16) - Add package documentation (#38) - Same as 0.2-5 Version 0.2 (2015-11-11) === - Tests from the proposal - SQL - Metadata - DBI compliance (not testing read-only yet) - Migrate most of the tests from RMySQL - Test improvements - Test BLOB data type (#17) - Check actual availability of type returned by `dbDataType()` (#19) - Testing infrastructure - Disambiguate test names (#21) - Use regex matching for deciding skipped tests, skip regex must match the entire test name - Documentation - Document all tests in each test function using the new inline documentation feature of roxygen2 - Improve documentation for `test_all()`: Tests are listed in new "Tests" section - Add brief instructions to README - Move repository to rstats-db namespace - Same as 0.1-6 Version 0.1 (2015-10-11) === - First GitHub release - Builds successfully on Travis - Testing infrastructure - Test context - Skipped tests call `skip()` - Function `test_all()` that runs all tests - Tests from the proposal - Getting started - Driver - Connection - Results - Code formatting is checked with lintr - Same as 0.0-5 DBItest/MD50000644000176200001440000002005614157055565012100 0ustar liggesusers76674d8b73518c4204f1be4c72886f60 *DESCRIPTION c8a3c0263f266b0a8d96c14ddbd9c226 *NAMESPACE 7e24156c9ce2050acefedf0f458a1d79 *NEWS.md f956e8e1290d2316d720831804645f34 *R/DBItest.R eb82398eefd2deaba5388c698333f726 *R/context.R 1801016b55e2ffb2a1a2d47ffa4caca3 *R/expectations.R 6bebc38570e75e033bf9bfd3227fa457 *R/import-dbi.R 7055df4f2eaff247f588b85390af34e4 *R/import-testthat.R ecf5e0fe2491c2e31dd21ecba54d8b29 *R/run.R 2cd97136984097bbf3dbb0cabe8962a8 *R/s4.R b4cea85b533fc783df25b517d0b96ff9 *R/spec-.R 0f31c304333be84f7316c3e4b97f3c0d *R/spec-all.R 7bbe6109582337c712256a9d57d35a68 *R/spec-compliance-methods.R d17825fd88154b476dae96a2def9fafa *R/spec-compliance.R e7219578c76f2692c5bda1edd6bc05c8 *R/spec-connection-data-type.R 
61f1d58c35f7086f32da75d62fe4bd61 *R/spec-connection-disconnect.R 5159a5bf7ad4d861ef1276fd5ad3bd5e *R/spec-connection-get-info.R ea73df12a2285a773993aa84a1072aae *R/spec-connection.R 28d9c3ebafed4bf5d4d172253ba66ecd *R/spec-driver-connect.R 652f175636d6d934fbd02e32076b2550 *R/spec-driver-constructor.R 7a0424daca39b764042615ae17a70c89 *R/spec-driver-data-type.R a1c8c695ecba7334417d6acbd9c8dfa1 *R/spec-driver-get-info.R 61b7a29ae92eb8e40c97501cec7e88ec *R/spec-driver.R 61ecf0d75572bf08f87fa43b3aaa2216 *R/spec-getting-started.R 60bcdd6429fbc7a1f108e285b7847300 *R/spec-meta-bind-.R 3df469eec59963187875c2fda08d17cf *R/spec-meta-bind-runner.R cef5422a0a569e294cf4625f39efa8f2 *R/spec-meta-bind-tester-extra.R 26d3ce6eea97b36f45e37b07dc12f7f0 *R/spec-meta-bind.R efd38df11fb3607cea505b8734dc2bb5 *R/spec-meta-column-info.R 906104cb25f4b8312f80ee459c803fca *R/spec-meta-get-info-result.R f6543eff1c4e0bb265ca45a907f0ff67 *R/spec-meta-get-row-count.R eda8e063a808a8a3cbd4f7cf1db19375 *R/spec-meta-get-rows-affected.R 3146e03a9d397161dc810aaf007f96a8 *R/spec-meta-get-statement.R 6a69b67bacda6259e511d1ef07cda764 *R/spec-meta-has-completed.R d2cfa6d924fefce33ae10f0574abe9c9 *R/spec-meta-is-valid.R 248f354e81b09e68530d6d2aff4927d8 *R/spec-meta.R 5757b712f1a727f9abdacb9e296bdbb9 *R/spec-result-clear-result.R 5bac5218eafc0aba1e45ffee4a5891a0 *R/spec-result-create-table-with-data-type.R 3e583158342760f724c4f8b183c0e4bd *R/spec-result-execute.R 4c7791925675604a07e28ab33d7c3c11 *R/spec-result-fetch.R 2063ef81d7cc95b44bb9a208577c8b73 *R/spec-result-get-query.R 993d8350a89eade0273e432ce768bba0 *R/spec-result-roundtrip.R c3f6e3748b699d704f99d65a10801a41 *R/spec-result-send-query.R c793d1c100ac89d6b4133a0aac5daa34 *R/spec-result-send-statement.R 81ebec2d79ecc0d4a9654851efe3e42c *R/spec-result.R b0e8e49761f759037f6f314f37bfbe9d *R/spec-sql-append-table.R 24c73480503886845423649f817646ab *R/spec-sql-create-table.R 567a45798f0fb1e07c879f4a6387b138 *R/spec-sql-exists-table.R 
adb7700f8d43faaf1fe531bd6cabd0c5 *R/spec-sql-list-fields.R e14a4a29495288f6880b4c3cf726a784 *R/spec-sql-list-objects.R beecbde1e8785ccc913d69ba5d915948 *R/spec-sql-list-tables.R ddd36e1cba0e8d63cdb5e56caba572e1 *R/spec-sql-quote-identifier.R 3f5c8b8408e48a3c464a0a7218344d55 *R/spec-sql-quote-literal.R 8cf498db1ef7db056effd8c74c793509 *R/spec-sql-quote-string.R c6ae88d7cf649baf4f7d97c695ad1efe *R/spec-sql-read-table.R afd191cb57b972066f6da38434e0ae8d *R/spec-sql-remove-table.R c6d9bb949682dafaf4fbcf925486e506 *R/spec-sql-unquote-identifier.R 96b9b699cc32a579c82796f37bdcc877 *R/spec-sql-write-table.R 8f29e1f917f821f6c37c5069a47fa082 *R/spec-sql.R b226a8b603ef3d6ac14da2732670c79f *R/spec-stress-connection.R e5c8c247e5b41ae3186378b39996fb85 *R/spec-stress.R e2e80da1ce88525a20d9e4f3b096a6e7 *R/spec-transaction-begin-commit-rollback.R 2dd177dfed226508144a385628a830b3 *R/spec-transaction-with-transaction.R dec1d80d14bdc41a93eea2acc9b24d2f *R/spec-transaction.R f5e3a264bad0456617159a1f2ddaa1ad *R/test-all.R bd97abed70883811283b9c0bba482b69 *R/test-compliance.R 559cf124790435fabdc22b66fc27087f *R/test-connection.R 30f0ebdf518955c83beb5555bcd3b0a3 *R/test-driver.R 1b3088ec4d7b7c1f66488e665827cef5 *R/test-getting-started.R ac9da359e3155bdcc7d911b62e1c27e6 *R/test-meta.R dd0fddb6f302c4feb92d732422c536be *R/test-result.R 6706e08f8fa9da4b8917bdd05daf0f69 *R/test-sql.R 9fa9a511dbf8116e9a083f8cf07a28ac *R/test-stress.R 820ed3d6a4fd8630a16b01b80799d7c3 *R/test-transaction.R 5b2381e77867550ccaba2a41e64a8ba3 *R/tweaks.R a94950e32f7138efb47198017154de70 *R/utf8.R f62e256ae5774c0f1249e5a4062620ed *R/utils.R 5afb206644346b2c38635fbe176c037c *R/zzz.R ee0d875a7787e939c242a313610b8e79 *README.md 7050eb13bf7d6b08ffe7dd9137c0252d *build/vignette.rds d7f5619c1fded073e01288d42c964462 *inst/WORDLIST 7ebdf35c2da8a12cb5f07a3be046cb40 *inst/doc/DBItest.R b35cd011898233316fa264cb34b070e2 *inst/doc/DBItest.Rmd 5f1ef263ea092353b35c0896900f21c5 *inst/doc/DBItest.html a24004c2656a5e89f70ecfa72d82380d 
*man/DBItest-package.Rd fab4df7982acc4830e96509dda0ba018 *man/context.Rd 0d3380f496d0caf0eede3661fbfe9a38 *man/make_placeholder_fun.Rd 7f1475c0834448dc7640d381adaf968d *man/spec_compliance_methods.Rd 4e05c7320e9f47f7d81d25c36988562e *man/spec_connection_disconnect.Rd 4caf0d0c2dc891346069109fd2c8a4ea *man/spec_driver_connect.Rd 8750a5923ac71157f00077b4c378fe78 *man/spec_driver_constructor.Rd 2af82d00b07554bfed5b6966fbf18385 *man/spec_driver_data_type.Rd 54aae4f8e90e4313fb5699ac0332fe9e *man/spec_get_info.Rd 2db06c65cf13d292a64c0fc62d36308a *man/spec_getting_started.Rd 6bddc2b47a1f139ba400e43db4542ff0 *man/spec_meta_bind.Rd 712a6437ca77a6ce68c828100cb631cd *man/spec_meta_column_info.Rd 0088e14e3855ee2a67010fa8163d1aad *man/spec_meta_get_row_count.Rd 51272b577ebe41f0e1d8425dc6948d0e *man/spec_meta_get_rows_affected.Rd 39106f5dbaa7275034a5a7d54b1f14dc *man/spec_meta_get_statement.Rd 93c0da7f4b87a6e48d80012890222da0 *man/spec_meta_has_completed.Rd 27ced5f145d6bf75eb43f59612b51e67 *man/spec_meta_is_valid.Rd 8a45554a3fd050d068029dd42e11e9f5 *man/spec_result_clear_result.Rd 9dc39bfc301eb8e4cf7594bfba938011 *man/spec_result_create_table_with_data_type.Rd 3b501f4b7a6720af0be39b5098f75da6 *man/spec_result_execute.Rd 94ca8902d738c3da177c5e8f68e4a68e *man/spec_result_fetch.Rd ef5468018150443dd3212a2492b9e58e *man/spec_result_get_query.Rd b03dfbd95ebc8e81821465529390a2a6 *man/spec_result_roundtrip.Rd 8c0c0f940abcf2adb09b9bf6ce3cee28 *man/spec_result_send_query.Rd 8e1e2535f0680e6c388cd09dcb468a36 *man/spec_result_send_statement.Rd 9112495f77f8e617a2de3859f2003476 *man/spec_sql_append_table.Rd 3022ba3f177db56c410f38eb9cbbb9ae *man/spec_sql_create_table.Rd 4c1ef658d8db7281456f0027b2df5df6 *man/spec_sql_exists_table.Rd ac0a30ac672ab98e4c0612edfd2f9815 *man/spec_sql_list_fields.Rd bd3f6fd46c9e8527ea5c288b8d5a0a52 *man/spec_sql_list_objects.Rd baffb4193580ed448ce95fe6636962cb *man/spec_sql_list_tables.Rd 019c1b38ac665f0eee311cf2d8202737 *man/spec_sql_quote_identifier.Rd 
37435612b2d2ba7044a4cb8fd2842ad4 *man/spec_sql_quote_literal.Rd 8d51036e1487b4ea654a79a9edc5c0d9 *man/spec_sql_quote_string.Rd ef9d7a0055d5d8ca9e61cead9c5d0068 *man/spec_sql_read_table.Rd 9377c86f95dcc5bece5175aa45ece984 *man/spec_sql_remove_table.Rd 667f8db05ec9401040456f1dc02d9ea9 *man/spec_sql_unquote_identifier.Rd 7b49674dee4ff6d34ee586406cfcecc0 *man/spec_sql_write_table.Rd 8443457cc6459e4452b3d0294c9d5d78 *man/spec_transaction_begin_commit_rollback.Rd 196c60c3b9b2da4dadc9d043426e4e9f *man/spec_transaction_with_transaction.Rd 0f9db7b203715994283b1aa1567d79ce *man/test_all.Rd 1a83dfc37e66d3e5662886817462ca9f *man/test_compliance.Rd 82281a0894948f781d82aa2a3d0d9611 *man/test_connection.Rd 157720b8395a16a24fb7158af0da3d59 *man/test_data_type.Rd 3bd7cb6a205f0be81c5bb154ac449287 *man/test_driver.Rd 9346a6165b32b3130b75bd106941d686 *man/test_getting_started.Rd 2cfb5682e30f035b50487ffbc41453d8 *man/test_meta.Rd 0e267fa091425edf262d238e37c33398 *man/test_result.Rd 7c81a07cac3c0b463dc75f5729add20b *man/test_sql.Rd fa95039a885e48841b0ff509e2dab4cc *man/test_stress.Rd ffa73185d344265dd920172c91f43c9c *man/test_transaction.Rd 6650fe838683a22349724400bcd1bf2d *man/tweaks.Rd e66cc0201e7914ca0a08d2401d1ac8a8 *tests/testthat.R 3cbb092bf66920023972495c5d5dda98 *tests/testthat/test-consistency.R 3675efbbcc4ee2129cfe7b52a10fd282 *tests/testthat/test-context.R e9b3b70248052aab0f62a4db507a74f1 *tests/testthat/test-lint.R 8d740e8ffa890ea201a4be8119408ef1 *tests/testthat/test-tweaks.R b35cd011898233316fa264cb34b070e2 *vignettes/DBItest.Rmd DBItest/inst/0000755000176200001440000000000014157052537012536 5ustar liggesusersDBItest/inst/doc/0000755000176200001440000000000014157052537013303 5ustar liggesusersDBItest/inst/doc/DBItest.html0000644000176200001440000006540714157052537015503 0ustar liggesusers Testing DBI backends

Testing DBI backends

Kirill Müller

This document shows how to use the DBItest package when implementing a new DBI backend or when applying it to an existing backend. The DBItest package provides a large collection of automated tests.

Testing a new backend

The test cases in the DBItest package are structured very similarly to the sections in the ā€œbackendā€ vignette:

vignette("backend", package = "DBI")

Like the ā€œbackendā€ vignette, this vignette assumes that you are implementing the RKazam package that has a Kazam() function that creates a new DBIDriver instance for connecting to a ā€œKazamā€ database.

You can add the tests in the DBItest package incrementally, as you proceed with implementing the various parts of the DBI. The DBItest package builds upon the testthat package. To enable it, run the following in your package directory (after installing or updating devtools):

devtools::use_testthat()
devtools::use_test("DBItest")

This creates, among others, a file test-DBItest.R in the tests/testthat directory. Replace its entire contents by the following:

DBItest::make_context(Kazam(), NULL)
DBItest::test_getting_started()

Now test your package with devtools::test(). If you followed at least the ā€œGetting startedā€ section of the DBI ā€œbackendā€ vignette, all tests should succeed.

By adding the corresponding test function to your tests/test-DBItest.R file before implementing a section, you get immediate feedback which functionality of this section still needs to be implemented by running devtools::test() again. Therefore, proceed by appending the following to tests/test-DBItest.R, to include a test case for the forthcoming section:

DBItest::test_driver()

Again, all tests should succeed when you are done with the ā€œDriverā€ section. Add the call to the next tester function, implement the following section until all tests succeed, and so forth.

In this scenario, you are usually interested only in the first error the test suite finds. The StopReporter of testthat is most helpful here, activate it by passing reporter = "stop" to devtools::test(). Alternatively, call the relevant DBItest::test_() function directly.

The tests are documented with the corresponding functions: For instance, ?test_driver shows a coarse description of all tests for the ā€œDriverā€ test case. Test failures will include the name of the test that is failing; in this case, investigating the documentation or the source code of the DBItest package will usually lead to the cause of the error.

Not all tests can be satisfied: For example, there is one test that tests that logical variables survive a write-read roundtrip to the database, whereas another test tests that logical variables are converted to integer in such a case. Tests can be skipped by adding regular expressions for the tests to skip as character vector to the call, as in the following1:

DBItest::test_driver(skip = c(
  "data_type"           # Reason 1...
  "constructor.*",      # Reason 2...
  NULL
))

Some other reasons to skip tests are: - your database does not support a feature - you want to postpone or avoid the implementation of a feature - the test takes too long to run.

Testing an existing backend

For an existing backend, simply enabling all tests may be the quickest way to get started. Run the following in your package directory (after installing or updating devtools):

devtools::use_testthat()
devtools::use_test("DBItest")

This creates, among others, a file test-DBItest.R in the tests/testthat directory. Replace its entire contents by the following:

DBItest::make_context(Kazam(), NULL)
DBItest::test_all()

The notes about ā€œKazamā€ and skipping tests from the previous section apply here as well. The test_all() function simply calls all test cases.

External testing

DBItest is currently geared towards usage as part of a package’s test suite. With some effort it is possible to test a database backend against a custom database. This can help verify that your database installation gives expected results when accessed with DBI with specific connection arguments.

The example below shows how to run tests with the RSQLite backend.

Preparation

First, we need to define a test context. It contains:

  • a connector that describes how to establish the database connection, see ?DBI::`DBIConnector-class` for details,
  • tweaks, see ?tweaks,
  • tests skipped by default, as a character vector.

Database backends that use DBItest for testing usually have a file test/testthat/helper-DBItest.R or test/testthat/test-DBItest.R where a call to make_context() can be found. The help for make_context() already contains an example that works for RSQLite. Adapt it to your needs.

The make_context() function must be called before any tests can run.

library(DBItest)

tweaks <- tweaks(
  constructor_relax_args = TRUE,
  placeholder_pattern = c("?", "$1", "$name", ":name"),
  date_cast = function(x) paste0("'", x, "'"),
  time_cast = function(x) paste0("'", x, "'"),
  timestamp_cast = function(x) paste0("'", x, "'"),
  logical_return = function(x) as.integer(x),
  date_typed = FALSE,
  time_typed = FALSE,
  timestamp_typed = FALSE
)

default_skip <- c("roundtrip_date", "roundtrip_timestamp")

invisible(make_context(
  new(
    "DBIConnector",
    .drv = RSQLite::SQLite(),
    .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite"))
  ),
  tweaks = tweaks,
  default_skip = default_skip
))

Testing

Use test_all() to run all tests, and test_some() to run a specific test that failed previously. The test_* functions need to be run with a testthat reporter to avoid stopping at the first error or warning. For interactive use, the ā€œprogressā€ reporter gives good results. In the example below, the ā€œlocationā€ and ā€œstopā€ reporters are combined. Review ?testthat::Reporter for a list of reporters.

DBItest::test_some("get_query_atomic")
## drv1 <- new("SQLiteDriver")
## conn1 <- dbConnect(drv1, dbname = "/var/folders/cw/805_99kx0lq50gkr0hr81_y40000gp/T//RtmpiMfUSu/DBItest2b3d10da19eb.sqlite")
## print(statement1 <- "SELECT 1.5 AS a")
## ## [1] "SELECT 1.5 AS a"
## dbGetQuery(conn1, statement1)
## ##     a
## ## 1 1.5
## Test passed šŸ„‡
## dbDisconnect(conn1)

DBItest relies heavily on metaprogramming. Unfortunately, this means that a failing test may give no indication of the reason for the failure. The test_some() function now by default integrates the new experimental dblog package. It prints the DBI code that is executed as the tests are run, as seen above.

Another way to scout for the reason of the problem is to review the sources of DBItest and relate the test name (that is printed with each failure) with the human-readable specification embedded with the test code.

testthat::with_reporter(
  c("location", "fail"),
  DBItest::test_some("get_query_atomic")
)
## drv1 <- new("SQLiteDriver")
## conn1 <- dbConnect(drv1, dbname = "/var/folders/cw/805_99kx0lq50gkr0hr81_y40000gp/T//RtmpiMfUSu/DBItest2b3d10da19eb.sqlite")
## Start test: DBItest: Result: get_query_atomic
##   run.R:78:11 [success]
## print(statement1 <- "SELECT 1.5 AS a")
## ## [1] "SELECT 1.5 AS a"
## dbGetQuery(conn1, statement1)
## ##     a
## ## 1 1.5
##   run.R:78:11 [success]
##   run.R:78:11 [success]
##   run.R:78:11 [success]
##   run.R:78:11 [success]
##   run.R:78:11 [success]
##   run.R:78:11 [success]
##   run.R:78:11 [success]
##   run.R:78:11 [success]
## End test: DBItest: Result: get_query_atomic
## 
## dbDisconnect(conn1)

  1. The terminating NULL allows appending new lines to the end by copy-pasting an existing line, without having to take care of the terminating comma.ā†©ļøŽ

DBItest/inst/doc/DBItest.Rmd0000644000176200001440000001622614157034343015247 0ustar liggesusers--- title: "Testing DBI backends" author: "Kirill Müller" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Testing DBI backends} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r setup, include = FALSE} knitr::opts_chunk$set(error = (getRversion() < "3.5")) ``` This document shows how to use the DBItest package when implementing a new DBI backend or when applying it to an existing backend. The DBItest package provides a large collection of automated tests. ## Testing a new backend The test cases in the DBItest package are structured very similarly to the sections in the "backend" vignette: ```r vignette("backend", package = "DBI") ``` Like the "backend" vignette, this vignette assumes that you are implementing the `RKazam` package that has a `Kazam()` function that creates a new `DBIDriver` instance for connecting to a "Kazam" database. You can add the tests in the DBItest package incrementally, as you proceed with implementing the various parts of the DBI. The DBItest package builds upon the testthat package. To enable it, run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_getting_started() ``` Now test your package with `devtools::test()`. If you followed at least the "Getting started" section of the DBI "backend" vignette, all tests should succeed. By adding the corresponding test function to your `tests/test-DBItest.R` file *before* implementing a section, you get immediate feedback which functionality of this section still needs to be implemented by running `devtools::test()` again. 
Therefore, proceed by appending the following to `tests/test-DBItest.R`, to include a test case for the forthcoming section: ```r DBItest::test_driver() ``` Again, all tests should succeed when you are done with the "Driver" section. Add the call to the next tester function, implement the following section until all tests succeed, and so forth. In this scenario, you are usually interested only in the first error the test suite finds. The `StopReporter` of `testthat` is most helpful here, activate it by passing `reporter = "stop"` to `devtools::test()`. Alternatively, call the relevant `DBItest::test_()` function directly. The tests are documented with the corresponding functions: For instance, `?test_driver` shows a coarse description of all tests for the "Driver" test case. Test failures will include the name of the test that is failing; in this case, investigating the documentation or the source code of the DBItest package will usually lead to the cause of the error. Not all tests can be satisfied: For example, there is one test that tests that `logical` variables survive a write-read roundtrip to the database, whereas another test tests that `logical` variables are converted to `integer` in such a case. Tests can be skipped by adding regular expressions for the tests to skip as character vector to the call, as in the following[^termnull]: [^termnull]: The terminating `NULL` allows appending new lines to the end by copy-pasting an existing line, without having to take care of the terminating comma. ```r DBItest::test_driver(skip = c( "data_type", # Reason 1... "constructor.*", # Reason 2... NULL )) ``` Some other reasons to skip tests are: - your database does not support a feature - you want to postpone or avoid the implementation of a feature - the test takes too long to run. ## Testing an existing backend For existing backends, simply enabling all tests may be the quickest way to get started. 
Run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_all() ``` The notes about "Kazam" and skipping tests from the previous section apply here as well. The `test_all()` function simply calls all test cases. ## External testing DBItest is currently geared towards usage as part of a package's test suite. With some effort it is possible to test a database backend against a custom database. This can help verify that your database installation gives expected results when accessed with DBI with specific connection arguments. The example below shows how to run tests with the RSQLite backend. ### Preparation First, we need to define a test context. It contains: - a connector that describes how to establish the database connection, see ``?DBI::`DBIConnector-class` `` for details, - tweaks, see `?tweaks`, - tests skipped by default, as a character vector. Database backends that use DBItest for testing usually have a file `test/testthat/helper-DBItest.R` or `test/testthat/test-DBItest.R` where a call to `make_context()` can be found. The help for `make_context()` already contains an example that works for RSQLite. Adapt it to your needs. The `make_context()` function must be called before any tests can run. 
```{r make-context} library(DBItest) tweaks <- tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ) default_skip <- c("roundtrip_date", "roundtrip_timestamp") invisible(make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks, default_skip = default_skip )) ``` ### Testing Use `test_all()` to run all tests, and `test_some()` to run a specific test that failed previously. The `test_*` functions need to be run with a testthat reporter to avoid stopping at the first error or warning. For interactive use, the "progress" reporter gives good results. In the example below, the "location" and "stop" reporters are combined. Review `?testthat::Reporter` for a list of reporters. ```{r simple, error = (getRversion() < "3.5") || !rlang::is_installed("dblog")} DBItest::test_some("get_query_atomic") ``` DBItest relies heavily on metaprogramming. Unfortunately, this means that a failing test may give no indication of the reason for the failure. The `test_some()` function now by default integrates the new experimental [dblog package](https://github.com/r-dbi/dblog) package. It prints the DBI code that is executed as the tests are run, as seen above. Another way to scout for the reason of the problem is to review the sources of DBItest and relate the test name (that is printed with each failure) with the human-readable specification embedded with the test code. 
```{r location, error = (getRversion() < "3.5") || !rlang::is_installed("dblog")} testthat::with_reporter( c("location", "fail"), DBItest::test_some("get_query_atomic") ) ``` DBItest/inst/doc/DBItest.R0000644000176200001440000000224014157052537014722 0ustar liggesusers## ----setup, include = FALSE--------------------------------------------------- knitr::opts_chunk$set(error = (getRversion() < "3.5")) ## ----make-context------------------------------------------------------------- library(DBItest) tweaks <- tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ) default_skip <- c("roundtrip_date", "roundtrip_timestamp") invisible(make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks, default_skip = default_skip )) ## ----simple, error = (getRversion() < "3.5") || !rlang::is_installed("dblog")---- DBItest::test_some("get_query_atomic") ## ----location, error = (getRversion() < "3.5") || !rlang::is_installed("dblog")---- testthat::with_reporter( c("location", "fail"), DBItest::test_some("get_query_atomic") ) DBItest/inst/WORDLIST0000644000176200001440000000066014157034343013725 0ustar liggesusersAppVeyor CII CMD Codecov coercible config DateTimeClasses dbi DBIDriver DBIResult dblog debugme desc dev devtools difftime DML extensibility hadley hostname Kazam lintr lookahead lookaheads metaprogramming noLD NOTEs parametrized README Reenable RMariaDB RMySQL roundtrip Roundtrip roundtripping roxygen RPostgres RSQLite rstats sql subclasses subqueries subquery termnull testthat untyped vctrs vectorization vectorized vectorizes