DBItest/0000755000176200001440000000000014725062511011553 5ustar liggesusersDBItest/tests/0000755000176200001440000000000014725060461012717 5ustar liggesusersDBItest/tests/testthat/0000755000176200001440000000000014725062511014555 5ustar liggesusersDBItest/tests/testthat/test-lint.R0000644000176200001440000000045414602017371016624 0ustar liggesuserstest_that("lintr is happy", { skip_on_cran() expect_false("package:DBI" %in% search()) require(DBI) on.exit(detach(), add = TRUE) expect_true("package:DBI" %in% search()) # lintr::expect_lint_free() detach() on.exit(NULL, add = FALSE) expect_false("package:DBI" %in% search()) }) DBItest/tests/testthat/test-context.R0000644000176200001440000000011714602017371017336 0ustar liggesuserstest_that("default context is NULL", { expect_null(get_default_context()) }) DBItest/tests/testthat/test-tweaks.R0000644000176200001440000000053114602017371017150 0ustar liggesuserstest_that("tweaks work as expected", { expect_true(names(formals(tweaks))[[1]] == "...") expect_warning(tweaks(`_oooops` = 42, `_darn` = -1), "_oooops, _darn") expect_warning(tweaks(), NA) expect_warning(tweaks(5), "named") expect_warning(tweaks(5, `_ooops` = 42), "named") expect_warning(tweaks(constructor_name = "constr"), NA) }) DBItest/tests/testthat/test-DBItest.R0000644000176200001440000000207614725054453017166 0ustar liggesusers# Generated by helper-dev.R, do not edit by hand skip_if_not_installed("RSQLite") skip_if_not_installed("nanoarrow") # helper-DBItest.R # Also copied into DBI tryCatch(skip = function(e) message(conditionMessage(e)), { skip_on_cran() skip_if_not_installed("DBItest") DBItest::make_context( RSQLite::SQLite(), list(dbname = tempfile("DBItest", fileext = ".sqlite")), tweaks = DBItest::tweaks( dbitest_version = "1.8.1", constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), 
logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ), name = "RSQLite" ) }) # test-DBItest.R # Also copied into DBI skip_on_cran() skip_if_not_installed("DBItest") DBItest::test_all( skip = c( if (getRversion() < "4.0") "stream_bind_too_many" ) ) # Cleanup set_default_context(NULL) DBItest/tests/testthat/test-consistency.R0000644000176200001440000000154014602061640020212 0ustar liggesuserstest_that("no unnamed specs", { tests <- compact(spec_all) vicinity <- NULL if (any(names(tests) == "")) { vicinity <- sort(unique(unlist( map(which(names(tests) == ""), "+", -1:1) ))) vicinity <- vicinity[names(tests)[vicinity] != ""] } expect_null(vicinity) }) test_that("no duplicate spec names expect known exceptions", { all_names <- names(spec_all) dupe_names <- unique(all_names[duplicated(all_names)]) expect_equal(dupe_names, rep("", length(dupe_names))) }) test_that("all specs used", { env <- asNamespace("DBItest") defined_spec_names <- ls(env, pattern = "^spec_") defined_specs <- mget(defined_spec_names, env) defined_spec_names <- unlist(sapply(defined_specs, names), use.names = FALSE) new_names <- setdiff(defined_spec_names, names(spec_all)) expect_equal(new_names, rep("", length(new_names))) }) DBItest/tests/testthat/test-dbi.R0000644000176200001440000000023214602017371016406 0ustar liggesuserstest_that("Exported DBI methods as expected", { skip_if_not_installed("DBI", "1.1.3.9002") expect_equal(all_dbi_generics(), fetch_dbi_generics()) }) DBItest/tests/testthat.R0000644000176200001440000000056614602017371014705 0ustar liggesusers# This file is part of the standard setup for testthat. # It is recommended that you do not modify it. # # Where should you do additional test configuration? 
# Learn more about the roles of various files in: # * https://r-pkgs.org/tests.html # * https://testthat.r-lib.org/reference/test_package.html#special-files library(testthat) library(DBItest) test_check("DBItest") DBItest/MD50000644000176200001440000002370214725062511012067 0ustar liggesusers1de678dcd8b66f4b2ad6c7bf061d35dc *DESCRIPTION dca507f7d9671142c2d6e51d10708138 *NAMESPACE c972a7fe28e862970fb2fe7ab477f988 *NEWS.md f956e8e1290d2316d720831804645f34 *R/DBItest.R 4d17f00d368781248db4751f190649ae *R/compat-purrr.R 9c9d2da36801aff77af6140466172e96 *R/context.R ea81022cd6adf41ca686a7433e90ebd6 *R/dbi.R b716d1067f4cb58e0615c05d6caaf2df *R/dummy.R a26d2410330755d08c25c65a4e69a8e5 *R/expectations.R 67c8caa0be42cf97169841dac7205bc0 *R/generics.R 608489c294c631a8c2a1ea9dcc3ca9ed *R/import-dbi.R e66d90eee2a566dab8069fec17e961c6 *R/import-testthat.R 352730230006086279ab1c1a3e362091 *R/run.R 2cd97136984097bbf3dbb0cabe8962a8 *R/s4.R 49fb1ce74ed5875392f95c9a8079b127 *R/spec-.R 6a5dd7be473eed4ef27f56205d84496c *R/spec-all.R 14d1663a847bc8835082f11105fcda22 *R/spec-arrow-append-table-arrow.R 0b71bd1070b1f8e24eaef0734ec11063 *R/spec-arrow-bind.R 32958c6420578b8b2e26dd34204b1612 *R/spec-arrow-create-table-arrow.R e007e4e359886be83816c8b32a0821ba *R/spec-arrow-fetch-arrow-chunk.R 858ef4aa8eb0aeed29a3dae61fdcc37c *R/spec-arrow-fetch-arrow.R a96687b75d2ea4e9ab2a92eea8b5d8a8 *R/spec-arrow-get-query-arrow.R a30343f3d5883c26aca3f0f38473cba7 *R/spec-arrow-read-table-arrow.R e42619a4d73199608960aca79ab17b8e *R/spec-arrow-roundtrip.R 15882070755f101a36645bc71baee568 *R/spec-arrow-send-query-arrow.R 67d986faa572f76354a802cced11a65f *R/spec-arrow-write-table-arrow.R 2146b3bc32544f1f6ff97746523ddb4a *R/spec-arrow.R acc5f16561d2bd4b22a06b4ad8791597 *R/spec-compliance-methods.R d17825fd88154b476dae96a2def9fafa *R/spec-compliance.R e7219578c76f2692c5bda1edd6bc05c8 *R/spec-connection-data-type.R edc8326065f6dd9b004ad9d1e8a94ba7 *R/spec-connection-disconnect.R 588e2353dce42438c9603f91e90687ec 
*R/spec-connection-get-info.R 6ac682ce080ad9380d194d579751e626 *R/spec-connection.R 9a55bdfd6512afa37c7e2dc150533979 *R/spec-driver-connect.R 23d675e579f6614b0d052cc5cff6e460 *R/spec-driver-constructor.R b4ffb23ebb72697096e5322d8232dcae *R/spec-driver-data-type.R 1a96f1c1fd95cb0fe22b9ac6c524ec3e *R/spec-driver-get-info.R 93534b247811a7133844b731a913633e *R/spec-driver.R 9769903ac259ff0f6ce70d91990d2070 *R/spec-getting-started.R 7145fd06f4c548431e597ef68923eb2b *R/spec-meta-bind-.R ad8599b3ecaeb617b575429391509565 *R/spec-meta-bind-arrow-stream.R eb5cd6ee82c3ee046b692f5d9fcd60be *R/spec-meta-bind-arrow.R 12f01a30cdfb74c26f5e9d22f27c0bc4 *R/spec-meta-bind-expr.R dda9d0107c6b4691881bfb09c033da4c *R/spec-meta-bind-formals.R 290a9e3ed19fded8e2a3df01575f1459 *R/spec-meta-bind-runner.R e9b081a6a871fa61c587699c571961bf *R/spec-meta-bind-stream.R 677ee2a029b70ad264d598c97c06f68f *R/spec-meta-bind.R 0755c7aea305f55bec3ad55527c09a3d *R/spec-meta-column-info.R 906104cb25f4b8312f80ee459c803fca *R/spec-meta-get-info-result.R 710e3ccac63fc8d334462a52edb2f364 *R/spec-meta-get-row-count.R 9fee0749488a164f992929d35df6c7d6 *R/spec-meta-get-rows-affected.R 0a66af87747290902f08fd12b4017931 *R/spec-meta-get-statement.R 675d7b7e8b410c9caceeb53486dbe7f8 *R/spec-meta-has-completed.R d24e40cbf40a4d0031560e2dc19b5201 *R/spec-meta-is-valid.R 03d6063911596d839dfb1dc312ccd132 *R/spec-meta.R 6dc8229513802204cf2b110b831c780f *R/spec-result-clear-result.R 43e749f11f64c8d493f4a76b2a9c90ec *R/spec-result-create-table-with-data-type.R 2bb142f3d32f13544ab7bb7912a8d787 *R/spec-result-execute.R ad8398df3c5d6af772969e2946d02358 *R/spec-result-fetch.R 8ff80c9e963d403b6d3e35974fc0eb48 *R/spec-result-get-query.R 643f278f4773484657cb60d80a659766 *R/spec-result-roundtrip.R 2545176d3e796c552ed4add2bc25e937 *R/spec-result-send-query.R 3ca88695e2475c2d477ab451b7e7f94a *R/spec-result-send-statement.R 785e6f4d78b008af6100176911152250 *R/spec-result.R a0bfde2793128cceea1db5e0b5e611e7 *R/spec-sql-append-table.R 
77377cc5ffe2103e17f9b0e67cd41345 *R/spec-sql-create-table.R cf3b6d701b65db510854490aa7a0689a *R/spec-sql-exists-table.R 9971d778fd836ad69628b64a31930442 *R/spec-sql-list-fields.R c84d3689c993d1eb644d9c44f833846d *R/spec-sql-list-objects.R d568ff9d2b34603a07bc98ea0b35c5b1 *R/spec-sql-list-tables.R 56f92f515179d9b354ae76c7643c2cec *R/spec-sql-quote-identifier.R b47fea7ce3642452a3e048d749547ba1 *R/spec-sql-quote-literal.R c22380bbb6ed5667741816cb8e643e32 *R/spec-sql-quote-string.R 92cd8400537763e09372f6ad580e769b *R/spec-sql-read-table.R abd5ad0f4d72303b73861278807ec26a *R/spec-sql-remove-table.R 5a8d09bb42187531b5d2e6f468a64875 *R/spec-sql-unquote-identifier.R 018037a8e2f5895aec11fc490eec4fd4 *R/spec-sql-write-table.R 9959baeb602b7e6897aaea437bb51492 *R/spec-sql.R 13c580fc6e6c7b3d34c192f322e9550e *R/spec-stress-connection.R e5c8c247e5b41ae3186378b39996fb85 *R/spec-stress.R d93d8b2ed4a34a281e61e8a420e24157 *R/spec-transaction-begin-commit-rollback.R 316b06e2e9cef8014e96f684067190e8 *R/spec-transaction-with-transaction.R dec1d80d14bdc41a93eea2acc9b24d2f *R/spec-transaction.R 892cf7c45066ebf3ec70282fa0dbfa86 *R/test-all.R 47eb1ab25d28740cc2e2e6ee2ac8e85e *R/test-arrow.R 083748ddbd2de6965462b8c1f32dd2fa *R/test-compliance.R 559cf124790435fabdc22b66fc27087f *R/test-connection.R 30f0ebdf518955c83beb5555bcd3b0a3 *R/test-driver.R 1b3088ec4d7b7c1f66488e665827cef5 *R/test-getting-started.R ac9da359e3155bdcc7d911b62e1c27e6 *R/test-meta.R dd0fddb6f302c4feb92d732422c536be *R/test-result.R 6706e08f8fa9da4b8917bdd05daf0f69 *R/test-sql.R 9fa9a511dbf8116e9a083f8cf07a28ac *R/test-stress.R 820ed3d6a4fd8630a16b01b80799d7c3 *R/test-transaction.R cb27fab853d31cd647dff2bca3946a12 *R/test_backend.R fcac1f9400e5b46f915f606f66ea3777 *R/tweaks.R 3ebd7affb36d6acc1c1f696f732a19b7 *R/utf8.R 75177ac58aaad5c8f1f858479bc893bf *R/utils.R bd74f305925817f09dd5b5157cc3e9d0 *R/zzz.R f5a3ad63ad8948897d9555f28e0f2cb7 *README.md 9977d8d377f3511d18afde7340bb8eb1 *build/vignette.rds 
d7f5619c1fded073e01288d42c964462 *inst/WORDLIST 972e3541cc5a1a040c28be21cd2283f6 *inst/doc/DBItest.R ea281afd0fbe43cb463e8efbe712d087 *inst/doc/DBItest.Rmd e60231965fc687b3d30b0769c1a57ac7 *inst/doc/DBItest.html a24004c2656a5e89f70ecfa72d82380d *man/DBItest-package.Rd 9f9e192ca510a4e5c65543f8aee5747c *man/context.Rd 0d3380f496d0caf0eede3661fbfe9a38 *man/make_placeholder_fun.Rd e9fe14b9eee71a94ad28a1a1457e0684 *man/spec_arrow_append_table_arrow.Rd 626eb2c4aa06c9557b1047205a7c944c *man/spec_arrow_create_table_arrow.Rd a83c2dd6ad12701a8a4336bfe46a8faf *man/spec_arrow_fetch_arrow.Rd 44ed01d7760c504eb11451bd0d80991b *man/spec_arrow_fetch_arrow_chunk.Rd 860b553b1fd443b6473f968711fff9f9 *man/spec_arrow_get_query_arrow.Rd 67a5fa56217705271dcabe923a0e8077 *man/spec_arrow_read_table_arrow.Rd 12771f6aa63b6efbf55f41d6c69c1aa1 *man/spec_arrow_send_query_arrow.Rd ae8159244dc1c3a0caee8904ebacb4f5 *man/spec_arrow_write_table_arrow.Rd d03bdb8f3efead81f35916a5f2bc1d30 *man/spec_compliance_methods.Rd c9d58efac3f047a7938327029a284df9 *man/spec_connection_disconnect.Rd f22c304dbc49066ede7a9e0547042b32 *man/spec_driver_connect.Rd 7e46983148ac74597eaa21556179b462 *man/spec_driver_constructor.Rd b5d389ac458d87c742b346df0f1b1506 *man/spec_driver_data_type.Rd 29c0ce87e65c06df9cff9a64d815165b *man/spec_get_info.Rd 2db06c65cf13d292a64c0fc62d36308a *man/spec_getting_started.Rd f56766bb15c5fc4a62c06a185b448e77 *man/spec_meta_bind.Rd 04fc9c1acc953edef94256ef9f32e8bd *man/spec_meta_column_info.Rd 2bbffe0862ab1d89e0c7b64592f7a097 *man/spec_meta_get_row_count.Rd 825380b960f785008ba3f4ca46833801 *man/spec_meta_get_rows_affected.Rd c93860deec34175d341230f363fc25bf *man/spec_meta_get_statement.Rd 0623af258fdc960ed3598b5f66f9d691 *man/spec_meta_has_completed.Rd 0ed8a4314011fe0162708fbaf1285d43 *man/spec_meta_is_valid.Rd dfd8bbbb68e361c902b3595df4afcf1c *man/spec_result_clear_result.Rd 9dc39bfc301eb8e4cf7594bfba938011 *man/spec_result_create_table_with_data_type.Rd e12eb79fa05984ec99c1783da205c127 
*man/spec_result_execute.Rd bcadc7c054b99a0aa411d60a3c3f00ac *man/spec_result_fetch.Rd 25e14d281a3a20410d899708ef45cf03 *man/spec_result_get_query.Rd 37542de93ce3b6e1a288bbc131f66864 *man/spec_result_roundtrip.Rd a85cebe87f4731f707c790584097924a *man/spec_result_send_query.Rd ad54305934266694198c834456a65743 *man/spec_result_send_statement.Rd ce1232d25f4858f4b96baf69ad0a1ae5 *man/spec_sql_append_table.Rd 24b313c9b46ae4cf57e3980f7359025e *man/spec_sql_create_table.Rd 6bcfa1da2d123e43da2ca61abb8d996a *man/spec_sql_exists_table.Rd c9044dabe327c75db0ef820221e98485 *man/spec_sql_list_fields.Rd a9c9e0aa3eb756907a0aa410b258f079 *man/spec_sql_list_objects.Rd 0775ee7f0779349373c62cfe7800bff4 *man/spec_sql_list_tables.Rd 35a6a2680c05be47cf47fdc4f62049a7 *man/spec_sql_quote_identifier.Rd 1428f8b9fbe3db16d4cea9dac04996d8 *man/spec_sql_quote_literal.Rd 82b379576902273416a4e2816ab502cc *man/spec_sql_quote_string.Rd 2ddc3e747588d8cf784384961b96d821 *man/spec_sql_read_table.Rd 7fa43adc609e0862906c10cfac504995 *man/spec_sql_remove_table.Rd ad47c355eb398ba5bf67c1187a4f3b97 *man/spec_sql_unquote_identifier.Rd ef6e2ddfb56b9f1ae2deddc1700e04ff *man/spec_sql_write_table.Rd 8443457cc6459e4452b3d0294c9d5d78 *man/spec_transaction_begin_commit_rollback.Rd cf047c8f18c32baa89e090c875477f46 *man/spec_transaction_with_transaction.Rd 31b1d0f56707088d18aca478f3b7ce9a *man/test_all.Rd 1181a9c0b8d0b9d22b16c5873fbbe907 *man/test_arrow.Rd b00c55c0b9f13f0194996686a7c13511 *man/test_compliance.Rd 81899624492de475696d8bdc188707f2 *man/test_connection.Rd 90c4ecb858145c31a3ce06a4f11b83f8 *man/test_data_type.Rd 3e4b741bb4e8c66f5473106a8b342959 *man/test_driver.Rd fc261635b5784573683ccf89a1dbefe0 *man/test_getting_started.Rd c227e4ec70e52546b6b1dde7ce882805 *man/test_meta.Rd 53301354c5121b59f969f646e52dd1cc *man/test_result.Rd c649a1a8a7df15881b21b5f3302f34fe *man/test_sql.Rd 1fdee0ade97da7c84e854a14f5cc329d *man/test_stress.Rd 0d76aba8a3d3f7b3bb77ace85a58df91 *man/test_transaction.Rd 
83674415848b05cf0cdced666b9b495c *man/tweaks.Rd 8dc017d96ecf7e7db5331c904f097b98 *tests/testthat.R 354157ab56b3cf91c1c80432e79eb76a *tests/testthat/test-DBItest.R 366872df559316464fa4626a5ca4bb93 *tests/testthat/test-consistency.R 932bcd764ef4dffdf9fda91b1d8aa044 *tests/testthat/test-context.R fe13ef1e56eb91135bd8cd5c915b4e95 *tests/testthat/test-dbi.R dceab4277c7f65f7f3c2891409310992 *tests/testthat/test-lint.R a5f72b2ea1354d200f4bb1fd33ad9bf9 *tests/testthat/test-tweaks.R ea281afd0fbe43cb463e8efbe712d087 *vignettes/DBItest.Rmd DBItest/.aspell/0000755000176200001440000000000014602017371013107 5ustar liggesusersDBItest/.aspell/DBI.rds0000644000176200001440000000014714602017371014221 0ustar liggesusers‹‹àb```b`a’Ì@&ƒs0°0p‚h§ÄäìÔ¼”b(ŸÙÅÉÊd÷M,ÊLtq‚rY}+ƒ} ®€üâ’ô¢T„Gªr– ä‚(› ¨,³$ expect_equal(names(formals(dbClearResult)), c("res", "...")) }, clear_result_return_query = function(con) { #' @return #' `dbClearResult()` returns `TRUE`, invisibly, for result sets obtained from #' `dbSendQuery()`, res <- dbSendQuery(con, trivial_query()) expect_invisible_true(dbClearResult(res)) }, clear_result_return_statement = function(ctx, con, table_name) { #' `dbSendStatement()`, res <- dbSendStatement(con, ctx$tweaks$create_table_as(table_name, "SELECT 1")) expect_invisible_true(dbClearResult(res)) }, clear_result_return_query_arrow = function(ctx, con, table_name) { # Failed on SQL Server skip_if_not_dbitest(ctx, "1.7.99.3") #' or `dbSendQueryArrow()`, res <- dbSendQueryArrow(con, ctx$tweaks$create_table_as(table_name, "SELECT 1")) expect_invisible_true(dbClearResult(res)) }, #' cannot_clear_result_twice_query = function(con) { #' @section Failure modes: #' An attempt to close an already closed result set issues a warning #' for `dbSendQuery()`, res <- dbSendQuery(con, trivial_query()) dbClearResult(res) expect_warning(expect_invisible_true(dbClearResult(res))) }, cannot_clear_result_twice_statement = function(ctx, con, table_name) { #' `dbSendStatement()`, res <- 
dbSendStatement(con, ctx$tweaks$create_table_as(table_name, "SELECT 1")) dbClearResult(res) expect_warning(expect_invisible_true(dbClearResult(res))) }, cannot_clear_result_twice_query_arrow = function(ctx, con, table_name) { # Failed on SQL Server skip_if_not_dbitest(ctx, "1.7.99.4") #' and `dbSendQueryArrow()`, res <- dbSendQueryArrow(con, ctx$tweaks$create_table_as(table_name, "SELECT 1")) dbClearResult(res) expect_warning(expect_invisible_true(dbClearResult(res))) }, #' @section Specification: #' `dbClearResult()` frees all resources associated with retrieving #' the result of a query or update operation. #' The DBI backend can expect a call to `dbClearResult()` for each #' [dbSendQuery()] or [dbSendStatement()] call. NULL ) DBItest/R/spec-result-fetch.R0000644000176200001440000001500514602020561015426 0ustar liggesusers#' spec_result_fetch #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_fetch <- list( fetch_formals = function() { # expect_equal(names(formals(dbFetch)), c("res", "n", "...")) }, fetch_atomic = function(con) { #' @return #' `dbFetch()` always returns a [data.frame] with #' as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value query <- trivial_query() res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_equal(rows, data.frame(a = 1.5)) }, fetch_one_row = function(con) { #' or has one query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, result) }, fetch_zero_rows = function(con) { #' or zero rows. 
query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(class(rows), "data.frame") }, fetch_na_rows = function(ctx, con) { if (as.package_version(ctx$tweaks$dbitest_version) < "1.7.4") { skip(paste0("tweak: dbitest_version: ", ctx$tweaks$dbitest_version)) } #' Passing `n = NA` is supported and returns an arbitrary number of rows (at least one) #' as specified by the driver, but at most the remaining rows in the result set. query <- trivial_query() res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, n = NA)) expect_equal(rows, data.frame(a = 1.5)) }, #' fetch_closed = function(con) { #' @section Failure modes: #' An attempt to fetch from a closed result set raises an error. query <- trivial_query() res <- dbSendQuery(con, query) dbClearResult(res) expect_error(dbFetch(res)) }, fetch_n_bad = function(con) { #' If the `n` argument is not an atomic whole number #' greater or equal to -1 or Inf, an error is raised, query <- trivial_query() res <- local_result(dbSendQuery(con, query)) expect_error(dbFetch(res, -2)) expect_error(dbFetch(res, 1.5)) expect_error(dbFetch(res, integer())) expect_error(dbFetch(res, 1:3)) }, fetch_n_good_after_bad = function(con) { #' but a subsequent call to `dbFetch()` with proper `n` argument succeeds. query <- trivial_query() res <- local_result(dbSendQuery(con, query)) expect_error(dbFetch(res, -2)) rows <- check_df(dbFetch(res)) expect_equal(rows, data.frame(a = 1.5)) }, fetch_no_return_value = function(con, table_name) { #' #' Calling `dbFetch()` on a result set from a data manipulation query #' created by [dbSendStatement()] can #' be fetched and return an empty data frame, with a warning. 
query <- paste0("CREATE TABLE ", table_name, " (a integer)") res <- local_result(dbSendStatement(con, query)) expect_warning(rows <- check_df(dbFetch(res))) expect_identical(rows, data.frame()) }, fetch_multi_row_single_column = function(ctx, con) { #' @section Specification: #' Fetching multi-row queries with one query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, result) }, fetch_multi_row_multi_column = function(ctx, con) { #' or more columns by default returns the entire result. query <- sql_union( .ctx = ctx, paste("SELECT", 1:5 + 0.5, "AS a,", 4:0 + 0.5, "AS b"), .order_by = "a" ) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, data.frame(a = 1:5 + 0.5, b = 4:0 + 0.5)) }, fetch_n_progressive = function(ctx, con) { #' Multi-row queries can also be fetched progressively query <- trivial_query(25, .ctx = ctx, .order_by = "a") result <- trivial_df(25) res <- local_result(dbSendQuery(con, query)) #' by passing a whole number ([integer] or rows <- check_df(dbFetch(res, 10L)) expect_identical(rows, unrowname(result[1:10, , drop = FALSE])) #' [numeric]) rows <- check_df(dbFetch(res, 10)) expect_identical(rows, unrowname(result[11:20, , drop = FALSE])) #' as the `n` argument. rows <- check_df(dbFetch(res, n = 5)) expect_identical(rows, unrowname(result[21:25, , drop = FALSE])) }, fetch_n_multi_row_inf = function(ctx, con) { #' A value of [Inf] for the `n` argument is supported #' and also returns the full result. query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, n = Inf)) expect_identical(rows, result) }, fetch_n_more_rows = function(ctx, con) { #' If more rows than available are fetched, the result is returned in full #' without warning. 
query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, 5L)) expect_identical(rows, result) #' If fewer rows than requested are returned, further fetches will #' return a data frame with zero rows. rows <- check_df(dbFetch(res)) expect_identical(rows, result[0, , drop = FALSE]) }, fetch_n_zero_rows = function(ctx, con) { #' If zero rows are fetched, the columns of the data frame are still fully #' typed. query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(0) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, 0L)) expect_identical(rows, result) }, fetch_n_premature_close = function(ctx, con) { #' Fetching fewer rows than available is permitted, #' no warning is issued when clearing the result set. query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(2) res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res, 2L)) expect_identical(rows, result) }, #' fetch_row_names = function(con) { #' A column named `row_names` is treated like any other column. 
query <- trivial_query(column = "row_names") result <- trivial_df(column = "row_names") res <- local_result(dbSendQuery(con, query)) rows <- check_df(dbFetch(res)) expect_identical(rows, result) expect_identical(.row_names_info(rows), -1L) }, # NULL ) DBItest/R/spec-result-create-table-with-data-type.R0000644000176200001440000000225514602017371021534 0ustar liggesusers#' spec_result_create_table_with_data_type #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_create_table_with_data_type <- list( data_type_create_table = function(ctx, con) { #' @section Specification: #' All data types returned by `dbDataType()` are usable in an SQL statement #' of the form check_connection_data_type <- function(value) { table_name <- random_table_name() local_remove_test_table(con, table_name) #' `"CREATE TABLE test (a ...)"`. query <- paste0("CREATE TABLE ", table_name, " (a ", dbDataType(con, value), ")") eval(bquote(dbExecute(con, .(query)))) } expect_conn_has_data_type <- function(value) { eval(bquote( expect_error(check_connection_data_type(.(value)), NA) )) } expect_conn_has_data_type(logical(1)) expect_conn_has_data_type(integer(1)) expect_conn_has_data_type(numeric(1)) expect_conn_has_data_type(character(1)) expect_conn_has_data_type(Sys.Date()) expect_conn_has_data_type(Sys.time()) if (!isTRUE(ctx$tweaks$omit_blob_tests)) { expect_conn_has_data_type(list(as.raw(0:10))) } }, # NULL ) DBItest/R/test-result.R0000644000176200001440000000061114602017371014366 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_result()]: #' Test the "Result" class NULL #' Test the "Result" class #' #' @inheritParams test_all #' @include test-connection.R #' @family tests #' @export test_result <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Result" run_tests(ctx, spec_result, skip, run_only, test_suite) } 
DBItest/R/spec-sql-quote-string.R0000644000176200001440000001176714602017371016277 0ustar liggesusers#' spec_sql_quote_string #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_quote_string <- list( quote_string_formals = function() { # expect_equal(names(formals(dbQuoteString)), c("conn", "x", "...")) }, quote_string_return = function(con) { #' @return #' `dbQuoteString()` returns an object that can be coerced to [character], simple <- "simple" simple_out <- dbQuoteString(con, simple) expect_error(as.character(simple_out), NA) expect_type(as.character(simple_out), "character") expect_equal(length(simple_out), 1L) }, # quote_string_vectorized = function(con) { #' of the same length as the input. letters_out <- dbQuoteString(con, letters) expect_equal(length(letters_out), length(letters)) #' For an empty character vector this function returns a length-0 object. empty_out <- dbQuoteString(con, character()) expect_equal(length(empty_out), 0L) }, # quote_string_double = function(con) { simple <- "simple" simple_out <- dbQuoteString(con, simple) letters_out <- dbQuoteString(con, letters) empty <- character() empty_out <- dbQuoteString(con, character()) #' #' When passing the returned object again to `dbQuoteString()` #' as `x` #' argument, it is returned unchanged. expect_identical(dbQuoteString(con, simple_out), simple_out) expect_identical(dbQuoteString(con, letters_out), letters_out) expect_identical(dbQuoteString(con, empty_out), empty_out) #' Passing objects of class [SQL] should also return them unchanged. expect_identical(dbQuoteString(con, SQL(simple)), SQL(simple)) expect_identical(dbQuoteString(con, SQL(letters)), SQL(letters)) expect_identical(dbQuoteString(con, SQL(empty)), SQL(empty)) #' (For backends it may be most convenient to return [SQL] objects #' to achieve this behavior, but this is not required.) 
}, quote_string_roundtrip = function(ctx, con) { #' @section Specification: do_test_string <- function(x) { #' The returned expression can be used in a `SELECT ...` query, query <- paste0("SELECT ", paste(dbQuoteString(con, x), collapse = ", ")) #' and for any scalar character `x` the value of #' \code{dbGetQuery(paste0("SELECT ", dbQuoteString(x)))[[1]]} #' must be identical to `x`, x_out <- check_df(dbGetQuery(con, query)) expect_equal(nrow(x_out), 1L) expect_identical(unlist(unname(x_out)), x) } expand_char <- function(...) { df <- expand.grid(..., stringsAsFactors = FALSE) do.call(paste0, df) } test_chars <- c( #' even if `x` contains "", #' spaces, " ", #' tabs, "\t", #' quotes (single "'", #' or double), '"', #' backticks, "`", #' or newlines "\n" ) #' (in any combination) # length(test_chars) ** 3 test_strings_0 <- expand_char(test_chars, "a", test_chars, "b", test_chars) #' or is itself the result of a `dbQuoteString()` call coerced back to #' character (even repeatedly). test_strings_1 <- as.character(dbQuoteString(con, test_strings_0)) test_strings_2 <- as.character(dbQuoteString(con, test_strings_1)) test_strings <- c(test_strings_0, test_strings_1, test_strings_2) do_test_string(test_strings) }, # quote_string_na = function(ctx, con) { null <- dbQuoteString(con, NA_character_) quoted_null <- dbQuoteString(con, as.character(null)) na <- dbQuoteString(con, "NA") quoted_na <- dbQuoteString(con, as.character(na)) query <- paste0( "SELECT ", null, " AS null_return,", na, " AS na_return,", quoted_null, " AS quoted_null,", quoted_na, " AS quoted_na" ) #' If `x` is `NA`, the result must merely satisfy [is.na()]. rows <- check_df(dbGetQuery(con, query)) expect_true(is.na(rows$null_return)) #' The strings `"NA"` or `"NULL"` are not treated specially. 
expect_identical(rows$na_return, "NA") expect_identical(rows$quoted_null, as.character(null)) expect_identical(rows$quoted_na, as.character(na)) }, # #' quote_string_na_is_null = function(ctx, con) { #' `NA` should be translated to an unquoted SQL `NULL`, null <- dbQuoteString(con, NA_character_) #' so that the query `SELECT * FROM (SELECT 1) a WHERE ... IS NULL` rows <- check_df(dbGetQuery(con, paste0("SELECT * FROM (SELECT 1) a WHERE ", null, " IS NULL"))) #' returns one row. expect_equal(nrow(rows), 1L) }, #' quote_string_error = function(ctx, con) { #' @section Failure modes: #' #' Passing a numeric, expect_error(dbQuoteString(con, c(1, 2, 3))) #' integer, expect_error(dbQuoteString(con, 1:3)) #' logical, expect_error(dbQuoteString(con, c(TRUE, FALSE))) #' or raw vector, expect_error(dbQuoteString(con, as.raw(1:3))) #' or a list expect_error(dbQuoteString(con, as.list(1:3))) #' for the `x` argument raises an error. }, # NULL ) DBItest/R/spec-meta-get-row-count.R0000644000176200001440000000516014602017371016465 0ustar liggesusers#' spec_meta_get_row_count #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_get_row_count <- list( get_row_count_formals = function() { # expect_equal(names(formals(dbGetRowCount)), c("res", "...")) }, row_count_query = function(con) { #' @return #' `dbGetRowCount()` returns a scalar number (integer or numeric), #' the number of rows fetched so far. query <- trivial_query() #' After calling [dbSendQuery()], res <- local_result(dbSendQuery(con, query)) rc <- dbGetRowCount(res) #' the row count is initially zero. expect_equal(rc, 0L) #' After a call to [dbFetch()] without limit, check_df(dbFetch(res)) rc <- dbGetRowCount(res) #' the row count matches the total number of rows returned. 
expect_equal(rc, 1L) }, # row_count_query_limited = function(ctx, con) { query <- sql_union(.ctx = ctx, trivial_query(), "SELECT 2", "SELECT 3") res <- local_result(dbSendQuery(con, query)) rc1 <- dbGetRowCount(res) expect_equal(rc1, 0L) #' Fetching a limited number of rows check_df(dbFetch(res, 2L)) #' increases the number of rows by the number of rows returned, rc2 <- dbGetRowCount(res) expect_equal(rc2, 2L) #' even if fetching past the end of the result set. check_df(dbFetch(res, 2L)) rc3 <- dbGetRowCount(res) expect_equal(rc3, 3L) }, # row_count_query_empty = function(ctx, con) { #' For queries with an empty result set, query <- sql_union( .ctx = ctx, "SELECT * FROM (SELECT 1 as a) a WHERE (0 = 1)" ) res <- local_result(dbSendQuery(con, query)) rc <- dbGetRowCount(res) #' zero is returned expect_equal(rc, 0L) check_df(dbFetch(res)) rc <- dbGetRowCount(res) #' even after fetching. expect_equal(rc, 0L) }, # row_count_statement = function(con, table_name) { query <- paste0("CREATE TABLE ", table_name, " (a integer)") #' For data manipulation statements issued with #' [dbSendStatement()], res <- local_result(dbSendStatement(con, query)) rc <- dbGetRowCount(res) #' zero is returned before expect_equal(rc, 0L) expect_warning(check_df(dbFetch(res))) rc <- dbGetRowCount(res) #' and after calling `dbFetch()`. expect_equal(rc, 0L) }, #' get_row_count_error = function(con) { #' @section Failure modes: res <- dbSendQuery(con, trivial_query()) dbClearResult(res) #' Attempting to get the row count for a result set cleared with #' [dbClearResult()] gives an error. 
expect_error(dbGetRowCount(res)) }, # NULL ) DBItest/R/test-arrow.R0000644000176200001440000000057414602017371014212 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_arrow()]: #' Test Arrow methods NULL #' Test Arrow methods #' #' @inheritParams test_all #' @include test-transaction.R #' @family tests #' @export test_arrow <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Arrow" run_tests(ctx, spec_arrow, skip, run_only, test_suite) } DBItest/R/context.R0000644000176200001440000000567314602017371013574 0ustar liggesusers#' Test contexts #' #' Create a test context, set and query the default context. #' #' @param drv `[DBIConnector]`\cr #' An object of class [DBIConnector-class] that describes how to connect #' to the database. #' @param connect_args `[named list]`\cr Deprecated. #' @param set_as_default `[logical(1)]`\cr Should the created context be #' set as default context? #' @param tweaks `[DBItest_tweaks]`\cr Tweaks as constructed by the #' [tweaks()] function. #' @param ctx `[DBItest_context]`\cr A test context. #' @param name `[character]`\cr An optional name of the context which will #' be used in test messages. #' @param default_skip `[character]`\cr Default value of `skip` argument #' to [test_all()] and other testing functions. #' #' @return `[DBItest_context]`\cr A test context, for #' `set_default_context` the previous default context (invisibly) or #' `NULL`. 
#' #' @rdname context #' @importFrom methods is new #' @export #' @examplesIf requireNamespace("RSQLite", quietly = TRUE) #' make_context( #' new( #' "DBIConnector", #' .drv = RSQLite::SQLite(), #' .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) #' ), #' tweaks = tweaks( #' constructor_relax_args = TRUE, #' placeholder_pattern = c("?", "$1", "$name", ":name"), #' date_cast = function(x) paste0("'", x, "'"), #' time_cast = function(x) paste0("'", x, "'"), #' timestamp_cast = function(x) paste0("'", x, "'"), #' logical_return = function(x) as.integer(x), #' date_typed = FALSE, #' time_typed = FALSE, #' timestamp_typed = FALSE #' ), #' default_skip = c("roundtrip_date", "roundtrip_timestamp") #' ) make_context <- function(drv, connect_args = NULL, set_as_default = TRUE, tweaks = NULL, name = NULL, default_skip = NULL) { if (is.null(drv)) { abort("drv cannot be NULL.") } if (is(drv, "DBIDriver")) { if (is.null(connect_args)) { connect_args <- list() } cnr <- new("DBIConnector", .drv = drv, .conn_args = connect_args) } else if (is(drv, "DBIConnector")) { cnr <- drv drv <- cnr@.drv } else { abort("drv must be of class DBIDriver or DBIConnector.") } if (is.null(tweaks)) { tweaks <- tweaks() } ctx <- structure( list( cnr = cnr, drv = drv, tweaks = tweaks, name = name, default_skip = default_skip ), class = "DBItest_context" ) if (set_as_default) { set_default_context(ctx) } ctx } #' @rdname context #' @export set_default_context <- function(ctx) { old_ctx <- .ctx_env$default_context .ctx_env$default_context <- ctx invisible(old_ctx) } #' @rdname context #' @export get_default_context <- function() { .ctx_env$default_context } package_name <- function(ctx) { attr(class(ctx$drv), "package") } connect <- function(ctx, ...) { quos <- enquos(...) 
eval_tidy(quo(dbConnect(ctx$cnr, !!!quos))) } .ctx_env <- new.env(parent = emptyenv()) set_default_context(NULL) DBItest/R/spec-meta-bind-runner.R0000644000176200001440000001764214725053661016216 0ustar liggesuserstest_select_bind_expr_one <- list() #' spec_meta_bind #' @family meta specifications #' @name spec_meta_bind #' @usage NULL #' @format NULL #' @keywords NULL #' @section Specification: #' \pkg{DBI} clients execute parametrized statements as follows: #' test_select_bind_expr_one$fun <- function( bind_values, ..., arrow, bind, query = TRUE, has_cast_fun = FALSE, check_return_value = NULL, patch_bind_values = NULL, bind_error = NA, warn = FALSE, is_repeated = FALSE, is_premature_clear = FALSE, is_untouched = FALSE) { check_dots_empty() force(bind_values) force(arrow) force(bind) force(query) force(check_return_value) force(patch_bind_values) force(bind_error) force(is_repeated) force(is_premature_clear) force(is_untouched) bind_values_expr <- if (bind == "stream") expr({ bind_values <- !!construct_expr(fix_params(bind_values)) }) else expr({ bind_values <- !!construct_expr(bind_values) }) set_bind_values_patched_expr <- if (!is.null(patch_bind_values)) expr({ bind_values_patched <- !!body(patch_bind_values) }) bind_values_patched_expr_base <- if (is.null(patch_bind_values)) expr({ bind_values }) else expr({ bind_values_patched }) cast_fun_placeholder_expr <- if (has_cast_fun) expr({ cast_fun(placeholder) }) else expr({ placeholder }) is_na <- which(map_lgl(bind_values, is_na_or_null)) result_names <- letters[seq_along(bind_values)] #' 1. Call [dbSendQuery()], [dbSendQueryArrow()] or [dbSendStatement()] #' with a query or statement that contains placeholders, #' store the returned [DBIResult-class] object in a variable. #' Mixing placeholders (in particular, named and unnamed ones) is not #' recommended. 
send_expr <- if (query) expr({ placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", (!!cast_fun_placeholder_expr), " = ", placeholder_values, ")") !!if (length(is_na) > 0) expr({ result_check[!!construct_expr(is_na)] <- paste0("(", is_null_check((!!cast_fun_placeholder_expr)[!!construct_expr(is_na)]), ")") }) sql <- "SELECT " !!!map2( seq_along(result_names), result_names, ~ expr({ sql <- paste0( sql, "CASE WHEN ", result_check[[!!.x]], !!paste0( " THEN ", trivial_values(2)[[1]], " ELSE ", trivial_values(2)[[2]], " END AS ", .y, if (.x < length(result_names)) ", " ) ) }) ) res <- (!!if (arrow == "none") expr(dbSendQuery) else expr(dbSendQueryArrow))(con, sql) }) else expr({ data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") !!!map2(result_names, seq_along(result_names), ~ expr({ sql <- paste0( sql, !!paste0(.x, " = "), placeholder[[!!.y]], !!!if (.y < length(result_names)) " AND " ) })) res <- dbSendStatement(con, sql) }) #' It is good practice to register a call to [dbClearResult()] via #' [on.exit()] right after calling `dbSendQuery()` or `dbSendStatement()` #' (see the last enumeration item). 
clear_expr <- if (is_premature_clear) expr({ dbClearResult(res) }) else expr({ on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) !!if (!is.null(check_return_value)) expr({ #' Until [dbBind()] or [dbBindArrow()] have been called, #' the returned result set object has the following behavior: !!if (query) expr({ #' - [dbFetch()] raises an error (for `dbSendQuery()` and `dbSendQueryArrow()`) expect_error(dbFetch(res)) #' - [dbGetRowCount()] returns zero (for `dbSendQuery()` and `dbSendQueryArrow()`) expect_equal(dbGetRowCount(res), 0) }) else expr({ #' - [dbGetRowsAffected()] returns an integer `NA` (for `dbSendStatement()`) expect_identical(dbGetRowsAffected(res), NA_integer_) }) #' - [dbIsValid()] returns `TRUE` expect_true(dbIsValid(res)) #' - [dbHasCompleted()] returns `FALSE` expect_false(dbHasCompleted(res)) }) }) #' 1. Call [dbBind()] or [dbBindArrow()]: bind_values_patched_expr <- if (bind == "df") expr({ #' - For [dbBind()], the `params` argument must be a list where all elements #' have the same lengths and contain values supported by the backend. #' A [data.frame] is internally stored as such a list. dbBind(res, !!bind_values_patched_expr_base) }) else expr({ #' - For [dbBindArrow()], the `params` argument must be a #' nanoarrow array stream, with one column per query parameter. dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(!!bind_values_patched_expr_base)) }) name_values_expr <- expr({ placeholder <- placeholder_fun(!!length(bind_values)) names(bind_values) <- names(placeholder) }) bind_expr <- if (!is.null(check_return_value)) expr({ bind_res <- withVisible(!!bind_values_patched_expr) !!body(check_return_value) }) else if (isTRUE(warn)) expr({ suppressWarnings(expect_warning(!!bind_values_patched_expr)) }) else if (is.na(bind_error)) expr({ !!bind_values_patched_expr }) else expr({ expect_error(!!bind_values_patched_expr, !!bind_error) }) #' 1. Retrieve the data or the number of affected rows from the `DBIResult` object. 
#' - For queries issued by `dbSendQuery()` or `dbSendQueryArrow()`, call [dbFetch()]. retrieve_expr <- if (query) expr({ rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), !!length(bind_values[[1]])) # Not checking more specifically in the case of zero rows because of RSQLite !!if (length(bind_values[[1]]) > 0) expr({ result <- !!construct_expr({ result_names <- letters[seq_along(bind_values)] expected <- c(trivial_values(1), rep(trivial_values(2)[[2]], length(bind_values[[1]]) - 1)) all_expected <- rep(list(expected), length(bind_values)) as.data.frame(setNames(all_expected, result_names)) }) expect_equal(rows, result) }) }) else expr({ #' - For statements issued by `dbSendStatements()`, #' call [dbGetRowsAffected()]. #' (Execution begins immediately after the [dbBind()] call, #' the statement is processed entirely before the function returns.) rows_affected <- dbGetRowsAffected(res) # Allow NA value for dbGetRowsAffected(), #297 if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, !!sum(bind_values[[1]])) } }) not_untouched_expr <- if (!is_untouched) expr({ !!retrieve_expr }) #' 1. Repeat 2. and 3. as necessary. repeated_expr <- if (is_repeated) expr({ !!bind_expr !!retrieve_expr }) early_exit <- is_premature_clear || !is.na(bind_error) || (!is.null(patch_bind_values) && !identical(bind_values, patch_bind_values(bind_values))) post_bind_expr <- if (!early_exit) expr({ !!not_untouched_expr !!repeated_expr }) #' 1. Close the result set via [dbClearResult()]. 
clear_now_expr <- if (!is_premature_clear) expr({ expect_error(dbClearResult(res), NA) res <- NULL }) test_expr <- expr({ !!bind_values_expr !!name_values_expr !!set_bind_values_patched_expr !!send_expr !!clear_expr !!bind_expr !!post_bind_expr !!clear_now_expr }) test_expr } construct_expr <- function(x) { xc <- tryCatch( constructive::construct(x, check = TRUE), error = function(e) { print(e) constructive::construct(x, check = FALSE) } ) parse_expr(format(xc$code)) } fix_params <- function(params) { if (is.atomic(params)) { params <- as.list(params) } as.data.frame(params, fix.empty.names = FALSE) } DBItest/R/test-getting-started.R0000644000176200001440000000106014602017371016154 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_getting_started()]: #' Getting started with testing NULL #' Getting started with testing #' #' Tests very basic features of a DBI driver package, to support testing #' and test-first development right from the start. #' #' @inheritParams test_all #' @include test-all.R #' @family tests #' @export test_getting_started <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Getting started" run_tests(ctx, spec_getting_started, skip, run_only, test_suite) } DBItest/R/spec-result-roundtrip.R0000644000176200001440000002763614725004207016405 0ustar liggesusers#' spec_result_roundtrip #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_roundtrip <- list( data_integer = function(ctx, con) { #' @section Specification: #' The column types of the returned data frame depend on the data returned: #' - [integer] (or coercible to an integer) for integer values between -2^31 and 2^31 - 1, #' with [NA] for SQL `NULL` values test_select_with_null(.ctx = ctx, con, 1L ~ equals_one, -100L ~ equals_minus_100) }, data_numeric = function(ctx, con) { #' - [numeric] for numbers with a fractional component, #' with NA for SQL `NULL` values 
test_select_with_null(.ctx = ctx, con, 1.5, -100.5) }, data_logical = function(ctx, con) { #' - [logical] for Boolean values (some backends may return an integer); int_values <- 1:0 values <- ctx$tweaks$logical_return(as.logical(int_values)) sql_names <- paste0("CAST(", int_values, " AS ", dbDataType(con, logical()), ")") #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, !!!setNames(values, sql_names)) }, data_character = function(ctx, con) { #' - [character] for text, values <- get_texts() test_funs <- rep(list(has_utf8_or_ascii_encoding), length(values)) sql_names <- as.character(dbQuoteString(con, values)) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, !!!setNames(values, sql_names)) test_select_with_null(.ctx = ctx, con, !!!setNames(test_funs, sql_names)) }, data_raw = function(ctx, con) { #' - lists of [raw] for blobs if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } is_raw_list <- function(x) { is.list(x) && is.raw(x[[1L]]) } values <- list(is_raw_list) sql_names <- ctx$tweaks$blob_cast(DBI::dbQuoteLiteral(con, list(raw(1)))) #' with [NULL] entries for SQL NULL values test_select_with_null(.ctx = ctx, con, !!!setNames(values, sql_names)) }, data_date = function(ctx, con) { #' - coercible using [as.Date()] for dates, as_date_equals_to <- function(x) { map(x, function(xx) { function(value) as.Date(value) == xx }) } char_values <- paste0("2015-01-", sprintf("%.2d", 1:12)) values <- as_date_equals_to(as.Date(char_values)) sql_names <- ctx$tweaks$date_cast(char_values) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, !!!setNames(values, sql_names)) }, data_date_current = function(ctx, con) { #' (also applies to the return value of the SQL function `current_date`) test_select_with_null( .ctx = ctx, con, "current_date" ~ is_roughly_current_date ) }, data_time = function(ctx, con) { #' - coercible using [hms::as_hms()] for times, as_hms_equals_to <- function(x) { map(x, 
function(xx) { function(value) hms::as_hms(value) == xx }) } char_values <- c("00:00:00", "12:34:56") time_values <- as_hms_equals_to(hms::as_hms(char_values)) sql_names <- ctx$tweaks$time_cast(char_values) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, !!!setNames(time_values, sql_names)) }, data_time_current = function(ctx, con) { #' (also applies to the return value of the SQL function `current_time`) test_select_with_null( .ctx = ctx, con, "current_time" ~ coercible_to_time ) }, data_timestamp = function(ctx, con) { #' - coercible using [as.POSIXct()] for timestamps, coercible_to_timestamp <- function(x) { x_timestamp <- try_silent(as.POSIXct(x)) !is.null(x_timestamp) && all(is.na(x) == is.na(x_timestamp)) } char_values <- c("2015-10-11 00:00:00", "2015-10-11 12:34:56") time_values <- rep(list(coercible_to_timestamp), 2L) sql_names <- ctx$tweaks$timestamp_cast(char_values) #' with NA for SQL `NULL` values test_select_with_null(.ctx = ctx, con, !!!setNames(time_values, sql_names)) }, data_timestamp_current = function(ctx, con) { #' (also applies to the return value of the SQL function `current_timestamp`) test_select_with_null( .ctx = ctx, con, "current_timestamp" ~ function(x) { coercible_to_timestamp <- function(x) { x_timestamp <- try_silent(as.POSIXct(x)) !is.null(x_timestamp) && all(is.na(x) == is.na(x_timestamp)) } coercible_to_timestamp(x) && (Sys.time() - as.POSIXct(x, tz = "UTC") <= hms::hms(2)) } ) }, #' data_date_typed = function(ctx, con) { #' If dates and timestamps are supported by the backend, the following R types are #' used: #' - [Date] for dates if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } char_values <- paste0("2015-01-", sprintf("%.2d", 1:12)) values <- map(char_values, as_numeric_date) sql_names <- ctx$tweaks$date_cast(char_values) test_select_with_null(.ctx = ctx, con, !!!setNames(values, sql_names)) }, data_date_current_typed = function(ctx, con) { #' (also applies to the return value of the 
SQL function `current_date`) if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } test_select_with_null( .ctx = ctx, con, "current_date" ~ is_roughly_current_date_typed ) }, data_timestamp_typed = function(ctx, con) { #' - [POSIXct] for timestamps if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } char_values <- c("2015-10-11 00:00:00", "2015-10-11 12:34:56") timestamp_values <- rep(list(is_timestamp), 2L) sql_names <- ctx$tweaks$timestamp_cast(char_values) test_select_with_null(.ctx = ctx, con, !!!setNames(timestamp_values, sql_names)) }, data_timestamp_current_typed = function(ctx, con) { #' (also applies to the return value of the SQL function `current_timestamp`) if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } test_select_with_null( .ctx = ctx, con, "current_timestamp" ~ is_roughly_current_timestamp_typed ) }, #' #' R has no built-in type with lossless support for the full range of 64-bit #' or larger integers. If 64-bit integers are returned from a query, #' the following rules apply: #' - Values are returned in a container with support for the full range of #' valid 64-bit values (such as the `integer64` class of the \pkg{bit64} #' package) #' - Coercion to numeric always returns a number that is as close as possible #' to the true value data_64_bit_numeric = function(ctx, con) { as_numeric_identical_to <- function(x) { map(x, function(xx) { function(value) as.numeric(value) == xx }) } char_values <- c("10000000000", "-10000000000") test_values <- as_numeric_identical_to(as.numeric(char_values)) test_select_with_null(.ctx = ctx, con, !!!setNames(test_values, char_values)) }, #' - Loss of precision when converting to numeric gives a warning data_64_bit_numeric_warning = function(ctx, con) { as_numeric_equals_to <- function(x) { map(x, function(xx) { function(value) isTRUE(all.equal(as.numeric(value), xx)) }) } char_values <- c(" 1234567890123456789", "-1234567890123456789") num_values <- 
as.numeric(char_values) test_values <- as_numeric_equals_to(num_values) suppressWarnings( expect_warning( test_select(.ctx = ctx, con, !!!setNames(test_values, char_values), .add_null = "none") ) ) suppressWarnings( expect_warning( test_select(.ctx = ctx, con, !!!setNames(test_values, char_values), .add_null = "above") ) ) suppressWarnings( expect_warning( test_select(.ctx = ctx, con, !!!setNames(test_values, char_values), .add_null = "below") ) ) }, #' - Conversion to character always returns a lossless decimal representation #' of the data data_64_bit_lossless = function(ctx, con) { as_character_equals_to <- function(x) { map(x, function(xx) { function(value) as.character(value) == xx }) } char_values <- c("1234567890123456789", "-1234567890123456789") test_values <- as_character_equals_to(char_values) test_select_with_null(.ctx = ctx, con, !!!setNames(test_values, char_values)) }, # NULL ) test_select_with_null <- function(...) { test_select(..., .add_null = "none") test_select(..., .add_null = "above") test_select(..., .add_null = "below") } test_select <- function( con, ..., .add_null = "none", .ctx, .envir = parent.frame()) { values <- list2(...) 
value_is_formula <- map_lgl(values, is.call) names(values)[value_is_formula] <- map(values[value_is_formula], "[[", 2L) values[value_is_formula] <- map( values[value_is_formula], function(x) { eval(x[[3]], envir = .envir) } ) if (is.null(names(values))) { sql_values <- map(values, as.character) } else { sql_values <- names(values) } if (isTRUE(.ctx$tweaks$current_needs_parens)) { sql_values <- gsub( "^(current_(?:date|time|timestamp))$", "\\1()", sql_values ) } sql_names <- letters[seq_along(sql_values)] query <- paste( "SELECT", paste(sql_values, "as", sql_names, collapse = ", ") ) if (.add_null != "none") { query_null <- paste( "SELECT", paste("NULL as", sql_names, collapse = ", ") ) query <- c(query, query_null) if (.add_null == "above") { query <- rev(query) } query <- paste0(query, ", ", 1:2, " as id") query <- sql_union(.ctx = .ctx, query) } rows <- check_df(dbGetQuery(con, query)) if (.add_null != "none") { rows <- rows[order(rows$id), -(length(sql_names) + 1L), drop = FALSE] if (.add_null == "above") { rows <- rows[2:1, , drop = FALSE] } } expect_identical(names(rows), sql_names) for (i in seq_along(values)) { value_or_testfun <- values[[i]] if (is.function(value_or_testfun)) { eval(bquote(expect_true(value_or_testfun(rows[1L, .(i)])))) } else { eval(bquote(expect_identical(rows[1L, .(i)], .(value_or_testfun)))) } } if (.add_null != "none") { expect_equal(nrow(rows), 2L) if (is.list(rows[[1L]])) { expect_true(is.null(rows[2L, 1L][[1L]])) } else { expect_true(is.na(rows[2L, 1L])) } } else { expect_equal(nrow(rows), 1L) } } equals_one <- function(x) { identical(as.integer(x), 1L) && identical(as.numeric(x), 1) } equals_minus_100 <- function(x) { identical(as.integer(x), -100L) && identical(as.numeric(x), -100) } all_have_utf8_or_ascii_encoding <- function(x) { all(map_lgl(x, has_utf8_or_ascii_encoding)) } has_utf8_or_ascii_encoding <- function(x) { if (Encoding(x) == "UTF-8") { TRUE } else if (Encoding(x) == "unknown") { # Characters encoded as "unknown" must 
be ASCII only, and remain "unknown" # after attempting to assign an encoding. From ?Encoding : # > ASCII strings will never be marked with a declared encoding, since their # > representation is the same in all supported encodings. Encoding(x) <- "UTF-8" Encoding(x) == "unknown" } else { FALSE } } coercible_to_date <- function(x) { x_date <- try_silent(as.Date(x)) !is.null(x_date) && all(is.na(x) == is.na(x_date)) } is_roughly_current_date <- function(x) { coercible_to_date(x) && (abs(Sys.Date() - as.Date(x)) <= 1) } coercible_to_time <- function(x) { x_hms <- try_silent(hms::as_hms(x)) !is.null(x_hms) && all(is.na(x) == is.na(x_hms)) } as_timestamp_equals_to <- function(x) { map(x, function(xx) { function(value) as.POSIXct(value) == xx }) } is_date <- function(x) { inherits(x, "Date") } is_roughly_current_date_typed <- function(x) { is_date(x) && (abs(Sys.Date() - x) <= 1) } is_timestamp <- function(x) { inherits(x, "POSIXct") } is_roughly_current_timestamp_typed <- function(x) { is_timestamp(x) && (Sys.time() - x <= hms::hms(2)) } as_numeric_date <- function(d) { d <- as.Date(d) structure(as.numeric(unclass(d)), class = class(d)) } DBItest/R/dbi.R0000644000176200001440000000252014602017371012632 0ustar liggesusersfetch_dbi_generics <- function() { dbi <- asNamespace("DBI") dbi_generics <- grep("^[.]__T__db", getNamespaceExports(dbi), value = TRUE) clean_dbi_generics <- gsub("^[.]__T__(.*):DBI$", "\\1", dbi_generics) active_dbi_generics <- setdiff(clean_dbi_generics, c( "dbDriver", "dbUnloadDriver", "dbListConnections", "dbListResults", "dbSetDataMappings", "dbGetException", "dbCallProc", "dbGetConnectArgs" )) dbi_names <- sort(c(active_dbi_generics, "Id")) dbi_names } create_generics <- function() { withr::local_collate("C") dbi_names <- fetch_dbi_generics() text <- paste0( "# Created by create_generics(), do not edit by hand\nall_dbi_generics <- function() {\n c(\n", paste0(' "', dbi_names, '",\n', collapse = ""), " NULL\n )\n}" ) writeLines(text, "R/generics.R") 
} dbi_generics <- function(version) { version <- as.package_version(version) generics <- all_dbi_generics() if (version < "1.7.99.1") { generics <- setdiff(generics, c( "dbGetQueryArrow", "dbAppendTableArrow", "dbFetchArrow", "dbFetchArrowChunk", "dbWriteTableArrow", "dbSendQueryArrow", "dbReadTableArrow", "dbCreateTableArrow" )) } if (version < "1.7.99.11") { generics <- setdiff(generics, c( "dbBindArrow" )) } generics } DBItest/R/spec-arrow-bind.R0000644000176200001440000000010314602017371015063 0ustar liggesusers# FIXME: Adapt tests from spec_meta_bind spec_arrow_bind <- list() DBItest/R/spec-arrow-append-table-arrow.R0000644000176200001440000004344714602061640017653 0ustar liggesusers#' spec_arrow_append_table_arrow #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_append_table_arrow <- list( arrow_append_table_arrow_formals = function() { # expect_equal(names(formals(dbAppendTableArrow)), c("conn", "name", "value", "...")) }, arrow_append_table_arrow_return = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.50") #' @return #' `dbAppendTableArrow()` returns a test_in <- stream_frame(trivial_df()) dbCreateTableArrow(con, table_name, test_in) ret <- dbAppendTableArrow(con, table_name, test_in) #' scalar expect_equal(length(ret), 1) #' numeric. expect_true(is.numeric(ret)) }, #' arrow_append_table_arrow_missing = function(con, table_name) { #' @section Failure modes: #' If the table does not exist, expect_false(dbExistsTable(con, table_name)) expect_error(dbAppendTableArrow(con, table_name, stream_frame(a = 2L))) }, arrow_append_table_arrow_invalid_value = function(con, table_name) { #' or the new data in `values` is not a data frame or has different column names, #' an error is raised; the remote table remains unchanged. 
test_in <- trivial_df() dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) expect_error(dbAppendTableArrow(con, table_name, test_in %>% stream_frame() %>% unclass())) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_arrow(test_out, test_in[0, , drop = FALSE]) }, arrow_append_table_arrow_append_incompatible = function(con, table_name) { test_in <- trivial_df() dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame()) expect_error(dbAppendTableArrow(con, table_name, stream_frame(b = 2L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_arrow(test_out, test_in) }, #' arrow_append_table_arrow_closed_connection = function(ctx, closed_con) { #' An error is raised when calling this method for a closed expect_error(dbAppendTableArrow(closed_con, "test", stream_frame(a = 1))) }, arrow_append_table_arrow_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbAppendTableArrow(invalid_con, "test", stream_frame(a = 1))) }, arrow_append_table_arrow_error = function(con, table_name) { #' An error is also raised test_in <- stream_frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbAppendTableArrow(con, NA, test_in)) #' if this results in a non-scalar. expect_error(dbAppendTableArrow(con, c("test", "test"), test_in)) }, #' arrow_append_table_arrow_roundtrip_keywords = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.49") #' @section Specification: #' SQL keywords can be used freely in table names, column names, and data. 
tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_arrow_roundtrip(use_append = TRUE, con, tbl_in, name = "exists") }, arrow_append_table_arrow_roundtrip_quotes = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.48") #' Quotes, commas, spaces, and other special characters such as newlines and tabs, #' can also be used in the data, tbl_in <- data.frame( as.character(dbQuoteString(con, "")), as.character(dbQuoteIdentifier(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb", stringsAsFactors = FALSE ) names(tbl_in) <- letters[seq_along(tbl_in)] test_arrow_roundtrip(con, tbl_in, use_append = TRUE) }, arrow_append_table_arrow_roundtrip_quotes_table_names = function(ctx, con) { #' and, if the database supports non-syntactic identifiers, #' also for table names if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb" ) tbl_in <- data.frame(trivial_df()) for (table_name in table_names) { test_arrow_roundtrip_one(con, tbl_in, use_append = TRUE, .add_na = FALSE) } }, arrow_append_table_arrow_roundtrip_quotes_column_names = function(ctx, con) { #' and column names. 
if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } column_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\nb", "a\\tb", "a\\rb", "a\\bb", "a\\zb" ) tbl_in <- data.frame(trivial_df(length(column_names), column_names)) test_arrow_roundtrip_one(con, tbl_in, use_append = TRUE, .add_na = FALSE) }, #' arrow_append_table_arrow_roundtrip_integer = function(con) { #' The following data types must be supported at least, #' and be read identically with [dbReadTable()]: #' - integer tbl_in <- data.frame(a = c(1:5)) test_arrow_roundtrip(use_append = TRUE, con, tbl_in) }, arrow_append_table_arrow_roundtrip_numeric = function(con) { #' - numeric tbl_in <- data.frame(a = c(seq(1, 3, by = 0.5))) test_arrow_roundtrip(use_append = TRUE, con, tbl_in) #' (the behavior for `Inf` and `NaN` is not specified) }, arrow_append_table_arrow_roundtrip_logical = function(ctx, con) { #' - logical tbl_in <- data.frame(a = c(TRUE, FALSE, NA)) tbl_exp <- tbl_in tbl_exp$a <- ctx$tweaks$logical_return(tbl_exp$a) test_arrow_roundtrip(use_append = TRUE, con, tbl_in, tbl_exp) }, arrow_append_table_arrow_roundtrip_null = function(con) { #' - `NA` as NULL tbl_in <- data.frame(a = NA) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { tbl_out$a <- as.logical(tbl_out$a) # Plain NA is of type logical tbl_out } ) }, #' - 64-bit values (using `"bigint"` as field type); the result can be arrow_append_table_arrow_roundtrip_64_bit_numeric = function(ctx, con) { skip("Internal: Need to enhance test_arrow_roundtrip()") tbl_in <- data.frame(a = c(-1e14, 1e15)) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { #' - converted to a numeric, which may lose precision, tbl_out$a <- as.numeric(tbl_out$a) tbl_out } ) }, # arrow_append_table_arrow_roundtrip_64_bit_character = function(ctx, con) { skip("Internal: Need to enhance 
test_arrow_roundtrip()") tbl_in <- data.frame(a = c(-1e14, 1e15)) tbl_exp <- tbl_in tbl_exp$a <- format(tbl_exp$a, scientific = FALSE) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, tbl_exp, transform = function(tbl_out) { #' - converted a character vector, which gives the full decimal #' representation tbl_out$a <- as.character(tbl_out$a) tbl_out } ) }, # arrow_append_table_arrow_roundtrip_64_bit_roundtrip = function(ctx, con, table_name) { skip("Internal: Need to enhance test_arrow_roundtrip()") tbl_in <- data.frame(a = c(-1e14, 1e15)) dbWriteTable(con, table_name, tbl_in, field.types = c(a = "BIGINT")) tbl_out <- dbReadTable(con, table_name) #' - written to another table and read again unchanged test_arrow_roundtrip(use_append = TRUE, con, tbl_out, tbl_expected = tbl_out) }, arrow_append_table_arrow_roundtrip_character = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.45") #' - character (in both UTF-8 tbl_in <- data.frame( id = seq_along(get_texts()), a = get_texts(), stringsAsFactors = FALSE ) test_arrow_roundtrip(use_append = TRUE, con, tbl_in) }, arrow_append_table_arrow_roundtrip_character_native = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.44") #' and native encodings), tbl_in <- data.frame( a = c(enc2native(get_texts())), stringsAsFactors = FALSE ) test_arrow_roundtrip(use_append = TRUE, con, tbl_in) }, arrow_append_table_arrow_roundtrip_character_empty = function(con) { #' supporting empty strings tbl_in <- data.frame( a = c("", "a"), stringsAsFactors = FALSE ) test_arrow_roundtrip(use_append = TRUE, con, tbl_in) }, arrow_append_table_arrow_roundtrip_character_empty_after = function(con) { #' (before and after non-empty strings) tbl_in <- data.frame( a = c("a", ""), stringsAsFactors = FALSE ) test_arrow_roundtrip(use_append = TRUE, con, tbl_in) }, arrow_append_table_arrow_roundtrip_factor = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.43") #' - factor (possibly returned as character) tbl_in <- data.frame( a = 
factor(get_texts()) ) tbl_exp <- tbl_in tbl_exp$a <- as.character(tbl_exp$a) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, tbl_exp, transform = function(tbl_out) { tbl_out$a <- as.character(tbl_out$a) tbl_out } ) }, arrow_append_table_arrow_roundtrip_blob = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.41") #' - objects of type [blob::blob] #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = blob::blob(as.raw(0:10))) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, arrow_append_table_arrow_roundtrip_date = function(ctx, con) { #' - date #' (if supported by the database; if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } #' returned as `Date`) tbl_in <- data.frame(a = as_numeric_date(c(Sys.Date() + 1:5))) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, arrow_append_table_arrow_roundtrip_date_extended = function(ctx, con) { #' also for dates prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } tbl_in <- data.frame(a = as_numeric_date(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" ))) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, arrow_append_table_arrow_roundtrip_time = function(ctx, con) { #' - time #' (if supported by the database; if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } tbl_in <- data.frame(a = hms::hms(minutes = 1:5)) tbl_in$b <- .difftime(as.numeric(tbl_in$a) / 60, "mins") tbl_exp <- tbl_in tbl_exp$a <- hms::as_hms(tbl_exp$a) tbl_exp$b <- hms::as_hms(tbl_exp$b) test_arrow_roundtrip( con, tbl_in, 
tbl_exp, transform = function(tbl_out) { #' returned as objects that inherit from `difftime`) expect_s3_class(tbl_out$a, "difftime") expect_s3_class(tbl_out$b, "difftime") tbl_out$a <- hms::as_hms(tbl_out$a) tbl_out$b <- hms::as_hms(tbl_out$b) tbl_out } ) }, arrow_append_table_arrow_roundtrip_timestamp = function(ctx, con) { #' - timestamp #' (if supported by the database; if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } #' returned as `POSIXct` local <- round(Sys.time()) + c( 1, 60, 3600, 86400, 86400 * 90, 86400 * 180, 86400 * 270, 1e9, 5e9 ) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone), test_arrow_roundtrip( use_append = TRUE, con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, arrow_append_table_arrow_roundtrip_timestamp_extended = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.40") #' also for timestamps prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } local <- as.POSIXct(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" )) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone) test_arrow_roundtrip( use_append = TRUE, con, tbl_in, 
transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' arrow_append_table_arrow_roundtrip_mixed = function(con) { #' Mixing column types in the same table is supported. data <- list("a", 1L, 1.5) data <- map(data, c, NA) expanded <- expand.grid(a = data, b = data, c = data) tbl_in_list <- map( seq_len(nrow(expanded)), function(i) { data.frame(map(expanded[i, ], unlist, recursive = FALSE)) } ) map(tbl_in_list, test_arrow_roundtrip, con = con) }, arrow_append_table_arrow_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbAppendTableArrow()` will do the quoting, dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_arrow(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, arrow_append_table_arrow_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) dbCreateTableArrow(con, dbQuoteIdentifier(con, table_name), test_in %>% stream_frame()) dbAppendTableArrow(con, 
dbQuoteIdentifier(con, table_name), test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_arrow(test_out, test_in) } }, #' arrow_append_table_arrow_value_df = function(con, table_name) { #' @section Specification: #' The `value` argument must be a data frame test_in <- trivial_df() dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_arrow(test_out, test_in) }, arrow_append_table_arrow_value_subset = function(ctx, con, table_name) { #' with a subset of the columns of the existing table. test_in <- trivial_df(3, letters[1:3]) dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame(.select = c(2))) test_out <- check_df(dbReadTable(con, table_name)) test_in[c(1, 3)] <- NA_real_ expect_equal_arrow(test_out, test_in) }, arrow_append_table_arrow_value_shuffle = function(ctx, con, table_name) { #' The order of the columns does not matter. 
test_in <- trivial_df(3, letters[1:3]) dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame(.select = c(2, 3, 1))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_arrow(test_out, test_in) }, # arrow_append_table_arrow_value_shuffle_subset = function(ctx, con, table_name) { test_in <- trivial_df(4, letters[1:4]) dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame(.select = c(4, 1, 3))) test_out <- check_df(dbReadTable(con, table_name)) test_in[2] <- NA_real_ expect_equal_arrow(test_out, test_in) }, # NULL ) DBItest/R/spec-result-send-query.R0000644000176200001440000001041414602017371016435 0ustar liggesusers#' spec_result_send_query #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_send_query <- list( send_query_formals = function() { # expect_equal(names(formals(dbSendQuery)), c("conn", "statement", "...")) }, send_query_trivial = function(con) { #' @return #' `dbSendQuery()` returns res <- expect_visible(dbSendQuery(con, trivial_query())) #' an S4 object that inherits from [DBIResult-class]. expect_s4_class(res, "DBIResult") #' The result set can be used with [dbFetch()] to extract records. expect_equal(check_df(dbFetch(res))[[1]], 1.5) #' Once you have finished using a result, make sure to clear it #' with [dbClearResult()]. dbClearResult(res) }, #' send_query_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when issuing a query over a closed expect_error(dbSendQuery(closed_con, trivial_query())) }, send_query_invalid_connection = function(ctx, invalid_con) { #' or invalid connection, expect_error(dbSendQuery(invalid_con, trivial_query())) }, send_query_non_string = function(con) { #' or if the query is not a non-`NA` string. 
expect_error(dbSendQuery(con, character())) expect_error(dbSendQuery(con, letters)) expect_error(dbSendQuery(con, NA_character_)) }, send_query_syntax_error = function(con) { #' An error is also raised if the syntax of the query is invalid #' and all query parameters are given (by passing the `params` argument) #' or the `immediate` argument is set to `TRUE`. #' #' @section Failure modes: expect_error(dbSendQuery(con, "SELLECT", params = list())) expect_error(dbSendQuery(con, "SELLECT", immediate = TRUE)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbSendQuery()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. send_query_result_valid = function(con) { #' @section Specification: #' No warnings occur under normal conditions. expect_warning(res <- dbSendQuery(con, trivial_query()), NA) #' When done, the DBIResult object must be cleared with a call to #' [dbClearResult()]. dbClearResult(res) }, # send_query_stale_warning = function(ctx) { #' Failure to clear the result set leads to a warning #' when the connection is closed. con <- connect(ctx) on.exit(dbDisconnect(con)) expect_warning(dbSendQuery(con, trivial_query()), NA) expect_warning({ dbDisconnect(con) gc() }) on.exit(NULL) }, #' send_query_only_one_result_set = function(con) { #' If the backend supports only one open result set per connection, res1 <- dbSendQuery(con, trivial_query()) #' issuing a second query invalidates an already open result set #' and raises a warning. expect_warning(res2 <- dbSendQuery(con, "SELECT 2")) expect_false(dbIsValid(res1)) #' The newly opened result set is valid expect_true(dbIsValid(res2)) #' and must be cleared with `dbClearResult()`. 
dbClearResult(res2) }, #' send_query_params = function(ctx, con) { #' The `param` argument allows passing query parameters, see [dbBind()] for details. placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { placeholder <- placeholder_fun(1) query <- paste0("SELECT ", placeholder, " + 1.0 AS a") values <- trivial_values(3) - 1 params <- stats::setNames(list(values), names(placeholder)) rs <- dbSendQuery(con, query, params = params) ret <- dbFetch(rs) expect_equal(ret, trivial_df(3), info = placeholder) dbClearResult(rs) } }, send_query_immediate = function(con, table_name) { #' @inheritSection spec_result_get_query Specification for the `immediate` argument res <- expect_visible(dbSendQuery(con, trivial_query(), immediate = TRUE)) expect_s4_class(res, "DBIResult") expect_error(dbGetRowsAffected(res), NA) dbClearResult(res) }, # NULL ) DBItest/R/spec-meta.R0000644000176200001440000000062314602017371013754 0ustar liggesusers#' @format NULL spec_meta <- c( spec_meta_bind_formals, spec_meta_bind, spec_meta_arrow_bind, spec_meta_stream_bind, spec_meta_arrow_stream_bind, spec_meta_is_valid, spec_meta_has_completed, spec_meta_get_statement, spec_meta_column_info, spec_meta_get_row_count, spec_meta_get_rows_affected, spec_meta_get_info_result, # # no 64-bit or time input data type yet # NULL ) DBItest/R/spec-meta-is-valid.R0000644000176200001440000000463214602017371015466 0ustar liggesusers#' spec_meta_is_valid #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_is_valid <- list( is_valid_formals = function() { # expect_equal(names(formals(dbIsValid)), c("dbObj", "...")) }, is_valid_connection = function(ctx) { #' @return #' `dbIsValid()` returns a logical scalar, #' `TRUE` if the object specified by `dbObj` is valid, #' `FALSE` otherwise. 
con <- connect(ctx) #' A [DBIConnection-class] object is initially valid, expect_true(expect_visible(dbIsValid(con))) expect_error(dbDisconnect(con), NA) #' and becomes invalid after disconnecting with [dbDisconnect()]. expect_false(expect_visible(dbIsValid(con))) }, # is_valid_stale_connection = function(ctx, invalid_con) { #' For an invalid connection object (e.g., for some drivers if the object #' is saved to a file and then restored), the method also returns `FALSE`. expect_false(expect_visible(dbIsValid(invalid_con))) }, # is_valid_result_query = function(con) { query <- trivial_query() res <- dbSendQuery(con, query) on.exit(dbClearResult(res)) #' A [DBIResult-class] object is valid after a call to [dbSendQuery()], expect_true(expect_visible(dbIsValid(res))) expect_error(dbFetch(res), NA) #' and stays valid even after all rows have been fetched; expect_true(expect_visible(dbIsValid(res))) dbClearResult(res) on.exit(NULL) #' only clearing it with [dbClearResult()] invalidates it. expect_false(dbIsValid(res)) }, # is_valid_result_statement = function(con, table_name) { query <- paste0("CREATE TABLE ", table_name, " (a ", dbDataType(con, 1L), ")") res <- dbSendStatement(con, query) on.exit(dbClearResult(res)) #' A [DBIResult-class] object is also valid after a call to [dbSendStatement()], expect_true(expect_visible(dbIsValid(res))) #' and stays valid after querying the number of rows affected; expect_error(dbGetRowsAffected(res), NA) expect_true(expect_visible(dbIsValid(res))) dbClearResult(res) on.exit(NULL) #' only clearing it with [dbClearResult()] invalidates it. expect_false(dbIsValid(res)) }, #' If the connection to the database system is dropped (e.g., due to #' connectivity problems, server failure, etc.), `dbIsValid()` should return #' `FALSE`. This is not tested automatically. 
NULL ) DBItest/R/spec-meta-has-completed.R0000644000176200001440000000466314602017371016507 0ustar liggesusers#' spec_meta_has_completed #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_has_completed <- list( has_completed_formals = function() { # expect_equal(names(formals(dbHasCompleted)), c("res", "...")) }, has_completed_query = function(con) { #' @return #' `dbHasCompleted()` returns a logical scalar. #' For a query initiated by [dbSendQuery()] with non-empty result set, res <- local_result(dbSendQuery(con, trivial_query())) #' `dbHasCompleted()` returns `FALSE` initially expect_false(expect_visible(dbHasCompleted(res))) #' and `TRUE` after calling [dbFetch()] without limit. check_df(dbFetch(res)) expect_true(expect_visible(dbHasCompleted(res))) }, # has_completed_statement = function(con, table_name) { #' For a query initiated by [dbSendStatement()], res <- local_result(dbSendStatement(con, paste0("CREATE TABLE ", table_name, " (a integer)"))) #' `dbHasCompleted()` always returns `TRUE`. expect_true(expect_visible(dbHasCompleted(res))) }, #' has_completed_error = function(con) { #' @section Failure modes: res <- dbSendQuery(con, trivial_query()) dbClearResult(res) #' Attempting to query completion status for a result set cleared with #' [dbClearResult()] gives an error. expect_error(dbHasCompleted(res)) }, has_completed_query_spec = function(con) { #' @section Specification: #' The completion status for a query is only guaranteed to be set to #' `FALSE` after attempting to fetch past the end of the entire result. #' Therefore, for a query with an empty result set, res <- local_result(dbSendQuery(con, "SELECT * FROM (SELECT 1 as a) AS x WHERE (1 = 0)")) #' the initial return value is unspecified, #' but the result value is `TRUE` after trying to fetch only one row. 
check_df(dbFetch(res, 1)) expect_true(expect_visible(dbHasCompleted(res))) }, has_completed_query_spec_partial = function(con) { #' @section Specification: #' Similarly, for a query with a result set of length n, res <- local_result(dbSendQuery(con, trivial_query())) #' the return value is unspecified after fetching n rows, check_df(dbFetch(res, 1)) #' but the result value is `TRUE` after trying to fetch only one more #' row. check_df(dbFetch(res, 1)) expect_true(expect_visible(dbHasCompleted(res))) }, # NULL ) DBItest/R/spec-driver.R0000644000176200001440000000022214602017371014314 0ustar liggesusers#' @format NULL spec_driver <- c( spec_driver_constructor, spec_driver_data_type, spec_driver_get_info, spec_driver_connect, # NULL ) DBItest/R/spec-meta-bind-.R0000644000176200001440000001032214725054142014743 0ustar liggesusers# Helpers ----------------------------------------------------------------- test_select_bind_expr <- function( bind_values, ctx = stop("ctx is available during run time only"), ..., arrow, bind, query = TRUE, skip_fun = NULL, dbitest_version = NULL, cast_fun = NULL, requires_names = NULL) { force(bind_values) force(arrow) force(bind) caller <- sys.function(-1) caller_src <- utils::getSrcref(caller) caller_ref <- paste0("") roxygen_bits <- grep("#' .*$", as.character(caller_src), value = TRUE) docstring <- gsub("^ +#' *", "", roxygen_bits) header <- c(caller_ref, docstring) cast_fun <- enquo(cast_fun) has_cast_fun <- !quo_is_null(cast_fun) cast_fun_expr <- if (has_cast_fun) expr({ cast_fun <- !!quo_get_expr(cast_fun) }) test_expr <- test_select_bind_expr_one$fun( bind_values = bind_values, ..., arrow = arrow, bind = bind, query = query, has_cast_fun = has_cast_fun ) skip_dbitest_expr <- if (!is.null(dbitest_version)) expr({ skip_if_not_dbitest(ctx, !!dbitest_version) }) skip_expr <- if (!is.null(skip_fun)) expr({ skip_if(!!body(skip_fun)) }) if (is.null(requires_names)) { placeholder_funs_expr <- expr(get_placeholder_funs(ctx)) } else { 
placeholder_funs_expr <- expr(get_placeholder_funs(ctx, requires_names = !!requires_names)) } allow_na_rows_affected_expr <- if (!query) expr({ allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected }) expr({ !!!header !!skip_dbitest_expr !!skip_expr placeholder_funs <- !!placeholder_funs_expr is_null_check <- ctx$tweaks$is_null_check !!cast_fun_expr !!allow_na_rows_affected_expr for (placeholder_fun in placeholder_funs) { !!test_expr } }) } get_placeholder_funs <- function(ctx, requires_names = NULL) { placeholder_fun <- ctx$tweaks$placeholder_pattern if (is.character(placeholder_fun)) { placeholder_funs <- map(placeholder_fun, make_placeholder_fun) } else if (is.function(placeholder_fun)) { placeholder_funs <- list(placeholder_fun) } else { placeholder_funs <- placeholder_fun } if (length(placeholder_funs) == 0) { skip("Use the placeholder_pattern tweak, or skip all 'bind_.*' tests") } if (!is.null(requires_names)) { placeholder_fun_values <- map(placeholder_funs, ~ .x(1)) placeholder_unnamed <- map_lgl(placeholder_fun_values, ~ is.null(names(.x))) # run_bind_tester$fun() if (isTRUE(requires_names)) { placeholder_funs <- placeholder_funs[!placeholder_unnamed] } if (isFALSE(requires_names)) { placeholder_funs <- placeholder_funs[placeholder_unnamed] } } placeholder_funs } # make_placeholder_fun ---------------------------------------------------- #' Create a function that creates n placeholders #' #' For internal use by the `placeholder_format` tweak. #' #' @param pattern `[character(1)]`\cr Any character, optionally followed by `1` or `name`. Examples: `"?"`, `"$1"`, `":name"` #' #' @return `[function(n)]`\cr A function with one argument `n` that #' returns a vector of length `n` with placeholders of the specified format. 
#' #' @keywords internal #' @examples #' body(DBItest:::make_placeholder_fun("?")) #' DBItest:::make_placeholder_fun("?")(2) #' DBItest:::make_placeholder_fun("$1")(3) #' DBItest:::make_placeholder_fun(":name")(5) make_placeholder_fun <- function(pattern) { format_rx <- "^(.)(.*)$" character <- gsub(format_rx, "\\1", pattern) kind <- gsub(format_rx, "\\2", pattern) if (character == "") { stop("placeholder pattern must have at least one character", call. = FALSE) } if (kind == "") { eval(bquote( function(n) rep(.(character), n) )) } else if (kind == "1") { eval(bquote( function(n) paste0(.(character), seq_len(n)) )) } else if (kind == "name") { eval(bquote( function(n) { l <- letters[seq_len(n)] stats::setNames(paste0(.(character), l), l) } )) } else { stop("Pattern must be any character, optionally followed by 1 or name. Examples: $1, :name", call. = FALSE) } } is_na_or_null <- function(x) { identical(x, list(NULL)) || any(is.na(x)) } DBItest/R/test-driver.R0000644000176200001440000000066614602017371014355 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_driver()]: #' Test the "Driver" class NULL #' Test the "Driver" class #' #' @inheritParams test_all #' @include test-getting-started.R #' @family tests #' @importFrom withr with_temp_libpaths #' @export test_driver <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Driver" run_tests(ctx, spec_driver, skip, run_only, test_suite) } DBItest/R/test-meta.R0000644000176200001440000000057614602017371014010 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_meta()]: #' Test metadata functions NULL #' Test metadata functions #' #' @inheritParams test_all #' @include test-sql.R #' @family tests #' @export test_meta <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Metadata" run_tests(ctx, spec_meta, skip, run_only, test_suite) } 
DBItest/R/spec-sql.R0000644000176200001440000000056714602017371013634 0ustar liggesusers#' @format NULL spec_sql <- c( spec_sql_quote_string, spec_sql_quote_literal, spec_sql_quote_identifier, spec_sql_unquote_identifier, spec_sql_read_table, spec_sql_create_table, spec_sql_append_table, spec_sql_write_table, spec_sql_list_tables, spec_sql_exists_table, spec_sql_remove_table, spec_sql_list_objects, spec_sql_list_fields, # NULL ) DBItest/R/spec-getting-started.R0000644000176200001440000000160514602017371016134 0ustar liggesusers#' spec_getting_started #' @family getting specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @section Definition: spec_getting_started <- list( package_dependencies = function(ctx) { #' A DBI backend is an R package pkg_path <- get_pkg_path(ctx) pkg_deps_df <- desc::desc_get_deps(pkg_path) pkg_imports <- pkg_deps_df$package[pkg_deps_df$type == "Imports"] #' which imports the \pkg{DBI} expect_true("DBI" %in% pkg_imports) #' and \pkg{methods} expect_true("methods" %in% pkg_imports) #' packages. }, # package_name = function(ctx) { pkg_name <- package_name(ctx) #' For better or worse, the names of many existing backends start with #' \sQuote{R}, e.g., \pkg{RSQLite}, \pkg{RMySQL}, \pkg{RSQLServer}; it is up #' to the backend author to adopt this convention or not. 
expect_match(pkg_name, "^R") }, # NULL ) DBItest/R/spec-sql-quote-literal.R0000644000176200001440000001156014602061640016412 0ustar liggesusers#' spec_sql_quote_literal #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_quote_literal <- list( quote_literal_formals = function() { # expect_equal(names(formals(dbQuoteLiteral)), c("conn", "x", "...")) }, quote_literal_return = function(con) { #' @return #' `dbQuoteLiteral()` returns an object that can be coerced to [character], simple <- "simple" simple_out <- dbQuoteLiteral(con, simple) expect_error(as.character(simple_out), NA) expect_type(as.character(simple_out), "character") expect_equal(length(simple_out), 1L) }, # quote_literal_vectorized = function(con) { #' of the same length as the input. letters_out <- dbQuoteLiteral(con, letters) expect_equal(length(letters_out), length(letters)) }, # quote_literal_empty = function(ctx, con) { skip_if_not_dbitest(ctx, "1.7.2") #' For an empty #' integer, expect_equal(length(dbQuoteLiteral(con, integer())), 0L) #' numeric, expect_equal(length(dbQuoteLiteral(con, numeric())), 0L) #' character, expect_equal(length(dbQuoteLiteral(con, character())), 0L) #' logical, expect_equal(length(dbQuoteLiteral(con, logical())), 0L) #' date, expect_equal(length(dbQuoteLiteral(con, Sys.Date()[0])), 0L) #' time, expect_equal(length(dbQuoteLiteral(con, Sys.time()[0])), 0L) #' or blob vector, expect_equal(length(dbQuoteLiteral(con, list())), 0L) #' this function returns a length-0 object. }, # quote_literal_double = function(con) { simple <- "simple" simple_out <- dbQuoteLiteral(con, simple) letters_out <- dbQuoteLiteral(con, letters) empty <- character() empty_out <- dbQuoteLiteral(con, character()) #' #' When passing the returned object again to `dbQuoteLiteral()` #' as `x` #' argument, it is returned unchanged. 
expect_identical(dbQuoteLiteral(con, simple_out), simple_out) expect_identical(dbQuoteLiteral(con, letters_out), letters_out) expect_identical(dbQuoteLiteral(con, empty_out), empty_out) #' Passing objects of class [SQL] should also return them unchanged. expect_identical(dbQuoteLiteral(con, SQL(simple)), SQL(simple)) expect_identical(dbQuoteLiteral(con, SQL(letters)), SQL(letters)) expect_identical(dbQuoteLiteral(con, SQL(empty)), SQL(empty)) #' (For backends it may be most convenient to return [SQL] objects #' to achieve this behavior, but this is not required.) }, quote_literal_roundtrip = function(ctx, con) { #' @section Specification: do_test_literal <- function(x) { #' The returned expression can be used in a `SELECT ...` query, literals <- map_chr(x, dbQuoteLiteral, conn = con) query <- paste0("SELECT ", paste(literals, collapse = ", ")) #' and the value of #' \code{dbGetQuery(paste0("SELECT ", dbQuoteLiteral(x)))[[1]]} #' must be equal to `x` x_out <- check_df(dbGetQuery(con, query)) expect_equal(nrow(x_out), 1L) is_logical <- map_lgl(x, is.logical) x_out[is_logical] <- map(x_out[is_logical], as.logical) is_numeric <- map_lgl(x, is.numeric) x_out[is_numeric] <- map(x_out[is_numeric], as.numeric) expect_equal(as.list(unname(x_out)), x) } #' for any scalar test_literals <- list( #' integer, 1L, #' numeric, 2.5, #' string, "string", #' and logical. TRUE ) do_test_literal(test_literals) }, # quote_literal_na = function(ctx, con) { null <- dbQuoteLiteral(con, NA_character_) quoted_null <- dbQuoteLiteral(con, as.character(null)) na <- dbQuoteLiteral(con, "NA") quoted_na <- dbQuoteLiteral(con, as.character(na)) query <- paste0( "SELECT ", null, " AS null_return,", na, " AS na_return,", quoted_null, " AS quoted_null,", quoted_na, " AS quoted_na" ) #' If `x` is `NA`, the result must merely satisfy [is.na()]. rows <- check_df(dbGetQuery(con, query)) expect_true(is.na(rows$null_return)) #' The literals `"NA"` or `"NULL"` are not treated specially. 
expect_identical(rows$na_return, "NA") expect_identical(rows$quoted_null, as.character(null)) expect_identical(rows$quoted_na, as.character(na)) }, # #' quote_literal_na_is_null = function(ctx, con) { #' `NA` should be translated to an unquoted SQL `NULL`, null <- dbQuoteLiteral(con, NA_character_) #' so that the query `SELECT * FROM (SELECT 1) a WHERE ... IS NULL` rows <- check_df(dbGetQuery(con, paste0("SELECT * FROM (SELECT 1) a WHERE ", null, " IS NULL"))) #' returns one row. expect_equal(nrow(rows), 1L) }, #' quote_literal_error = function(ctx, con) { #' @section Failure modes: #' #' Passing a list expect_error(dbQuoteString(con, as.list(1:3))) #' for the `x` argument raises an error. }, # NULL ) DBItest/R/generics.R0000644000176200001440000000202614602017371013674 0ustar liggesusers# Created by create_generics(), do not edit by hand all_dbi_generics <- function() { c( "Id", "dbAppendTable", "dbAppendTableArrow", "dbBegin", "dbBind", "dbBindArrow", "dbCanConnect", "dbClearResult", "dbColumnInfo", "dbCommit", "dbConnect", "dbCreateTable", "dbCreateTableArrow", "dbDataType", "dbDisconnect", "dbExecute", "dbExistsTable", "dbFetch", "dbFetchArrow", "dbFetchArrowChunk", "dbGetInfo", "dbGetQuery", "dbGetQueryArrow", "dbGetRowCount", "dbGetRowsAffected", "dbGetStatement", "dbHasCompleted", "dbIsReadOnly", "dbIsValid", "dbListFields", "dbListObjects", "dbListTables", "dbQuoteIdentifier", "dbQuoteLiteral", "dbQuoteString", "dbReadTable", "dbReadTableArrow", "dbRemoveTable", "dbRollback", "dbSendQuery", "dbSendQueryArrow", "dbSendStatement", "dbUnquoteIdentifier", "dbWithTransaction", "dbWriteTable", "dbWriteTableArrow", NULL ) } DBItest/R/spec-result-send-statement.R0000644000176200001440000001226114602017371017276 0ustar liggesusers#' spec_result_send_statement #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_send_statement <- list( send_statement_formals = function() { # expect_equal(names(formals(dbSendStatement)), 
c("conn", "statement", "...")) }, send_statement_trivial = function(ctx, con, table_name) { #' @return #' `dbSendStatement()` returns res <- expect_visible(dbSendStatement(con, trivial_statement(ctx, table_name))) #' an S4 object that inherits from [DBIResult-class]. expect_s4_class(res, "DBIResult") #' The result set can be used with [dbGetRowsAffected()] to #' determine the number of rows affected by the query. expect_error(dbGetRowsAffected(res), NA) #' Once you have finished using a result, make sure to clear it #' with [dbClearResult()]. dbClearResult(res) }, #' send_statement_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when issuing a statement over a closed table_name <- "dbit10" expect_error(dbSendStatement(closed_con, trivial_statement(ctx, table_name = table_name))) }, send_statement_invalid_connection = function(ctx, invalid_con) { #' or invalid connection, table_name <- "dbit11" expect_error(dbSendStatement(invalid_con, trivial_statement(ctx, table_name = table_name))) }, send_statement_non_string = function(con) { #' or if the statement is not a non-`NA` string. expect_error(dbSendStatement(con, character())) expect_error(dbSendStatement(con, letters)) expect_error(dbSendStatement(con, NA_character_)) }, send_statement_syntax_error = function(con) { #' An error is also raised if the syntax of the query is invalid #' and all query parameters are given (by passing the `params` argument) #' or the `immediate` argument is set to `TRUE`. #' #' @section Failure modes: expect_error(dbSendStatement(con, "CREATTE", params = list())) expect_error(dbSendStatement(con, "CREATTE", immediate = TRUE)) }, send_statement_result_valid = function(ctx, con, table_name) { #' @section Specification: #' No warnings occur under normal conditions. expect_warning(res <- dbSendStatement(con, trivial_statement(ctx, table_name)), NA) #' When done, the DBIResult object must be cleared with a call to #' [dbClearResult()]. 
dbClearResult(res) }, # send_statement_stale_warning = function(ctx) { #' Failure to clear the result set leads to a warning #' when the connection is closed. con <- connect(ctx) on.exit(dbDisconnect(con)) expect_warning(dbSendStatement(con, trivial_query()), NA) expect_warning({ dbDisconnect(con) gc() }) on.exit(NULL) }, send_statement_only_one_result_set = function(ctx, con, table_name) { #' If the backend supports only one open result set per connection, res1 <- dbSendStatement(con, trivial_statement(ctx, table_name)) other_table_name <- random_table_name() local_remove_test_table(con, other_table_name) #' issuing a second query invalidates an already open result set #' and raises a warning. query <- ctx$tweaks$create_table_as(other_table_name, "SELECT 1 AS a") expect_warning(res2 <- dbSendStatement(con, query)) expect_false(dbIsValid(res1)) #' The newly opened result set is valid expect_true(dbIsValid(res2)) #' and must be cleared with `dbClearResult()`. dbClearResult(res2) }, #' @section Additional arguments: #' The following arguments are not part of the `dbSendStatement()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. send_statement_params = function(ctx, con) { #' @section Specification: #' #' The `param` argument allows passing query parameters, see [dbBind()] for details. 
placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { table_name <- random_table_name() local_remove_test_table(con, table_name) dbWriteTable(con, table_name, data.frame(a = as.numeric(1:3))) placeholder <- placeholder_fun(1) query <- paste0("DELETE FROM ", table_name, " WHERE a > ", placeholder) values <- 1.5 params <- stats::setNames(list(values), names(placeholder)) rs <- dbSendStatement(con, query, params = params) rc <- dbGetRowsAffected(rs) if (isTRUE(ctx$tweaks$allow_na_rows_affected)) { expect_true((is.na(rc) && is.numeric(rc)) || rc == 2L, info = placeholder) } else { expect_equal(rc, 2L, info = placeholder) } dbClearResult(rs) } }, send_statement_immediate = function(ctx, con, table_name) { #' @inheritSection spec_result_get_query Specification for the `immediate` argument res <- expect_visible(dbSendStatement(con, trivial_statement(ctx, table_name), immediate = TRUE)) expect_s4_class(res, "DBIResult") expect_error(dbGetRowsAffected(res), NA) dbClearResult(res) }, # NULL ) DBItest/R/spec-meta-get-rows-affected.R0000644000176200001440000000437214602017371017265 0ustar liggesusers#' spec_meta_get_rows_affected #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_get_rows_affected <- list( get_rows_affected_formals = function() { # expect_equal(names(formals(dbGetRowsAffected)), c("res", "...")) }, rows_affected_statement = function(ctx, con, table_name) { #' @return #' `dbGetRowsAffected()` returns a scalar number (integer or numeric), #' the number of rows affected by a data manipulation statement dbWriteTable(con, table_name, data.frame(a = 1:10)) query <- paste0( "DELETE FROM ", dbQuoteIdentifier(con, table_name), " ", "WHERE a < 6" ) #' issued with [dbSendStatement()]. 
res <- local_result(dbSendStatement(con, query)) rc <- dbGetRowsAffected(res) #' The value is available directly after the call if (isTRUE(ctx$tweaks$allow_na_rows_affected)) { expect_true((is.na(rc) && is.numeric(rc)) || rc == 5L) } else { expect_equal(rc, 5L) } expect_warning(check_df(dbFetch(res))) rc <- dbGetRowsAffected(res) #' and does not change after calling [dbFetch()]. if (isTRUE(ctx$tweaks$allow_na_rows_affected)) { expect_true((is.na(rc) && is.numeric(rc)) || rc == 5L) } else { expect_equal(rc, 5L) } #' `NA_integer_` or `NA_numeric_` are allowed if the number of rows affected is not known. }, # rows_affected_query = function(ctx, con) { query <- trivial_query() #' #' For queries issued with [dbSendQuery()], res <- local_result(dbSendQuery(con, query)) rc <- dbGetRowsAffected(res) #' zero is returned before expect_equal(rc, 0L) check_df(dbFetch(res)) rc <- dbGetRowsAffected(res) #' and after the call to `dbFetch()`. expect_equal(rc, 0L) #' `NA` values are not allowed. }, #' get_rows_affected_error = function(con, table_name) { #' @section Failure modes: query <- paste0( "CREATE TABLE ", dbQuoteIdentifier(con, table_name), " (a integer)" ) res <- dbSendStatement(con, query) dbClearResult(res) #' Attempting to get the rows affected for a result set cleared with #' [dbClearResult()] gives an error. 
expect_error(dbGetRowsAffected(res)) }, # NULL ) DBItest/R/spec-meta-column-info.R0000644000176200001440000000633014602017371016201 0ustar liggesusers#' spec_meta_column_info #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_column_info <- list( column_info_formals = function() { # expect_equal(names(formals(dbColumnInfo)), c("res", "...")) }, column_info = function(ctx, con, table_name) { #' @return #' `dbColumnInfo()` penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) res <- local_result(dbSendQuery(con, paste0("SELECT * FROM ", table_name))) fields <- dbColumnInfo(res) #' returns a data frame expect_s3_class(fields, "data.frame") #' with at least two columns `"name"` and `"type"` (in that order) expect_equal(names(fields)[1:2], c("name", "type")) #' (and optional columns that start with a dot). expect_true(all(grepl("^[.]", names(fields)[-1:-2]))) #' The `"name"` and `"type"` columns contain the names and types #' of the R columns of the data frame that is returned from [`dbFetch()`]. penguins_ret <- dbFetch(res) expect_identical(fields$name, names(penguins_ret)) #' The `"type"` column is of type `character` and only for information. expect_type(fields$type, "character") #' Do not compute on the `"type"` column, instead use `dbFetch(res, n = 0)` #' to create a zero-row data frame initialized with the correct data types. }, #' column_info_closed = function(con) { #' @section Failure modes: #' An attempt to query columns for a closed result set raises an error. query <- trivial_query() res <- dbSendQuery(con, query) dbClearResult(res) expect_error(dbColumnInfo(res)) }, column_info_row_names = function(con, table_name) { #' @section Specification: #' #' A column named `row_names` is treated like any other column. 
dbWriteTable(con, table_name, data.frame(a = 1L, row_names = 2L)) res <- local_result(dbSendQuery(con, paste0("SELECT * FROM ", table_name))) expect_identical(dbColumnInfo(res)$name, c("a", "row_names")) }, #' column_info_consistent = function(ctx, con) { res <- local_result(dbSendQuery(con, "SELECT 1.5 AS a, 2.5 AS b")) #' The column names are always consistent info <- dbColumnInfo(res) #' with the data returned by `dbFetch()`. data <- dbFetch(res) expect_identical(info$name, names(data)) }, #' column_info_consistent_unnamed = function(ctx, con) { # odbc package skip_if_not_dbitest(ctx, "1.7.2") #' If the query returns unnamed columns, res <- local_result(dbSendQuery(con, "SELECT 1.5, 2.5 AS a, 1.5, 3.5")) info <- dbColumnInfo(res) data <- dbFetch(res) expect_identical(info$name, names(data)) expect_equal(data[["a"]], 2.5) #' non-empty and non-`NA` names are assigned. expect_false(anyNA(names(data))) expect_true(all(names(data) != "")) }, #' column_info_consistent_keywords = function(ctx, con) { #' Column names that correspond to SQL or R keywords are left unchanged. res <- local_result(dbSendQuery(con, paste0("SELECT 1.5 AS ", dbQuoteIdentifier(con, "for")))) info <- dbColumnInfo(res) data <- dbFetch(res) expect_identical(info$name, names(data)) expect_equal(data[["for"]], 1.5) }, # NULL ) DBItest/R/spec-arrow-send-query-arrow.R0000644000176200001440000001132114602017371017377 0ustar liggesusers#' spec_result_send_query #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_send_query_arrow <- list( arrow_send_query_formals = function() { # expect_equal(names(formals(dbSendQueryArrow)), c("conn", "statement", "...")) }, arrow_send_query_trivial = function(con) { #' @return #' `dbSendQueryArrow()` returns res <- expect_visible(dbSendQueryArrow(con, trivial_query())) #' an S4 object that inherits from [DBIResultArrow-class]. 
expect_s4_class(res, "DBIResultArrow") #' The result set can be used with [dbFetchArrow()] to extract records. expect_equal(check_arrow(dbFetchArrow(res))[[1]], 1.5) #' Once you have finished using a result, make sure to clear it #' with [dbClearResult()]. dbClearResult(res) }, #' arrow_send_query_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when issuing a query over a closed expect_error(dbSendQueryArrow(closed_con, trivial_query())) }, arrow_send_query_invalid_connection = function(ctx, invalid_con) { #' or invalid connection, expect_error(dbSendQueryArrow(invalid_con, trivial_query())) }, arrow_send_query_non_string = function(con) { #' or if the query is not a non-`NA` string. expect_error(dbSendQueryArrow(con, character())) expect_error(dbSendQueryArrow(con, letters)) expect_error(dbSendQueryArrow(con, NA_character_)) }, arrow_send_query_syntax_error = function(ctx, con) { skip_if_not_dbitest(ctx, "1.7.99.5") #' An error is also raised if the syntax of the query is invalid #' and all query parameters are given (by passing the `params` argument) #' or the `immediate` argument is set to `TRUE`. #' #' @section Failure modes: expect_error(dbSendQueryArrow(con, "SELLECT", params = list())) expect_error(dbSendQueryArrow(con, "SELLECT", immediate = TRUE)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbSendQueryArrow()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. arrow_send_query_result_valid = function(ctx, con) { skip_if_not_dbitest(ctx, "1.7.99.6") #' @section Specification: #' No warnings occur under normal conditions. 
expect_warning(res <- dbSendQueryArrow(con, trivial_query()), NA) #' When done, the DBIResult object must be cleared with a call to #' [dbClearResult()]. dbClearResult(res) }, # arrow_send_query_stale_warning = function(ctx) { skip_if_not_dbitest(ctx, "1.7.99.7") #' Failure to clear the result set leads to a warning #' when the connection is closed. con <- connect(ctx) on.exit(dbDisconnect(con)) expect_warning(dbSendQueryArrow(con, trivial_query()), NA) expect_warning({ dbDisconnect(con) gc() }) on.exit(NULL) }, #' arrow_send_query_only_one_result_set = function(ctx, con) { skip_if_not_dbitest(ctx, "1.7.99.8") #' If the backend supports only one open result set per connection, res1 <- dbSendQueryArrow(con, trivial_query()) #' issuing a second query invalidates an already open result set #' and raises a warning. expect_warning(res2 <- dbSendQueryArrow(con, "SELECT 2")) expect_false(dbIsValid(res1)) #' The newly opened result set is valid expect_true(dbIsValid(res2)) #' and must be cleared with `dbClearResult()`. dbClearResult(res2) }, #' arrow_send_query_params = function(ctx, con) { skip_if_not_dbitest(ctx, "1.7.99.9") #' The `param` argument allows passing query parameters, see [dbBind()] for details. 
placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { placeholder <- placeholder_fun(1) query <- paste0("SELECT ", placeholder, " + 1.0 AS a") values <- trivial_values(3) - 1 params <- stats::setNames(list(values), names(placeholder)) rs <- dbSendQueryArrow(con, query, params = params) ret <- dbFetch(rs) expect_equal(ret, trivial_df(3), info = placeholder) dbClearResult(rs) } }, arrow_send_query_immediate = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.7.99.10") #' @inheritSection spec_result_get_query Specification for the `immediate` argument res <- expect_visible(dbSendQueryArrow(con, trivial_query(), immediate = TRUE)) expect_s4_class(res, "DBIResultArrow") expect_error(dbGetRowsAffected(res), NA) dbClearResult(res) }, NULL ) DBItest/R/spec-arrow-fetch-arrow.R0000644000176200001440000000612414602020561016374 0ustar liggesusers#' spec_arrow_fetch_arrow #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_fetch_arrow <- list( arrow_fetch_arrow_formals = function() { # expect_equal(names(formals(dbFetchArrow)), c("res", "...")) }, arrow_fetch_arrow_atomic = function(con) { #' @return #' `dbFetchArrow()` always returns an object coercible to a [data.frame] with #' as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value query <- trivial_query() res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrow(res)) expect_equal(rows, data.frame(a = 1.5)) }, arrow_fetch_arrow_one_row = function(con) { #' or has one query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrow(res)) expect_identical(rows, result) }, arrow_fetch_arrow_zero_rows = function(con) { #' or zero rows. 
query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrow(res)) expect_identical(class(rows), "data.frame") }, #' arrow_fetch_arrow_closed = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.15") #' @section Failure modes: #' An attempt to fetch from a closed result set raises an error. query <- trivial_query() res <- dbSendQueryArrow(con, query) dbClearResult(res) expect_error(dbFetchArrow(res)) }, arrow_fetch_arrow_multi_row_single_column = function(ctx, con) { #' @section Specification: #' Fetching multi-row queries with one query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrow(res)) expect_identical(rows, result) }, arrow_fetch_arrow_multi_row_multi_column = function(ctx, con) { #' or more columns by default returns the entire result. query <- sql_union( .ctx = ctx, paste("SELECT", 1:5 + 0.5, "AS a,", 4:0 + 0.5, "AS b"), .order_by = "a" ) res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrow(res)) expect_identical(rows, data.frame(a = 1:5 + 0.5, b = 4:0 + 0.5)) }, arrow_fetch_arrow_record_batch_reader = function(ctx, con) { #' The object returned by `dbFetchArrow()` can also be passed to #' [nanoarrow::as_nanoarrow_array_stream()] to create a nanoarrow #' array stream object that can be used to read the result set #' in batches. query <- trivial_query(25, .ctx = ctx, .order_by = "a") result <- trivial_df(25) res <- local_result(dbSendQueryArrow(con, query)) stream <- dbFetchArrow(res) rbr <- nanoarrow::as_nanoarrow_array_stream(stream) #' The chunk size is implementation-specific. 
out <- as.data.frame(rbr$get_next()) expect_equal(out, head(result, nrow(out))) }, # NULL ) DBItest/R/run.R0000644000176200001440000000740714602061640012707 0ustar liggesusersrun_tests <- function(ctx, tests, skip, run_only, test_suite) { tests_sym <- enexpr(tests) stopifnot(is_symbol(tests_sym)) tests_qual <- call2(":::", sym("DBItest"), tests_sym) "!DEBUG run_tests(`test_suite`)" if (is.null(ctx)) { stop("Need to call make_context() to use the test_...() functions.", call. = FALSE) } if (!inherits(ctx, "DBItest_context")) { stop("ctx must be a DBItest_context object created by make_context().", call. = FALSE) } test_context <- paste0( "DBItest", if (!is.null(ctx$name)) paste0("[", ctx$name, "]"), ": ", test_suite ) tests <- compact(tests) tests <- get_run_only_tests(tests, run_only) if (is.null(skip)) { skip <- ctx$default_skip } test_names <- names(tests) skipped <- get_skip_names(skip) skip_flag <- names(tests) %in% skipped if (length(tests) > 0) { global_con <- local_connection(ctx) } ok <- vapply( seq_along(tests), function(test_idx) { test_name <- test_names[[test_idx]] if (skip_flag[[test_idx]]) { FALSE } else { test_fun <- tests[[test_idx]] fmls <- formals(test_fun) test_that(paste0(test_context, ": ", test_name), { args <- list() if ("ctx" %in% names(fmls)) { args <- c(args, list(ctx = expr(ctx))) } if ("con" %in% names(fmls)) { args <- c(args, list(con = expr(global_con))) } if ("local_con" %in% names(fmls)) { local_con <- local_connection(ctx) args <- c(args, list(local_con = expr(local_con))) } if ("closed_con" %in% names(fmls)) { closed_con <- local_closed_connection(ctx) args <- c(args, list(closed_con = expr(closed_con))) } if ("invalid_con" %in% names(fmls)) { invalid_con <- local_invalid_connection(ctx) args <- c(args, list(invalid_con = expr(invalid_con))) } if ("table_name" %in% names(fmls)) { if (is_missing(fmls$table_name)) { table_name <- random_table_name() } else { table_name <- fmls$table_name } local_remove_test_table(global_con, 
table_name) args <- c(args, list(table_name = table_name)) } # Example of generated expression: # DBItest:::spec_arrow$arrow_append_table_arrow_roundtrip_64_bit_roundtrip(...) test_fun_expr <- expr(`$`(!!tests_qual, !!test_name)(!!!args)) expect_warning( eval(test_fun_expr), NA ) }) } }, logical(1L) ) if (any(skip_flag)) { test_that(paste0(test_context, ": skipped tests"), { skip(paste0("DBItest::run_tests(): by request: ", paste(names(tests)[skip_flag], collapse = ", "))) }) } # to isolate test topics gc() ok } get_skip_names <- function(skip) { if (length(skip) == 0L) { return(character()) } names_all <- names(spec_all) names_all <- names_all[names_all != ""] skip_flags_all <- map(paste0("(?:^(?:", skip, ")(?:|_[0-9]+)$)"), grepl, names_all, perl = TRUE) skip_used <- map_lgl(skip_flags_all, any) if (!all(skip_used)) { warning("Unused skip expressions: ", paste(skip[!skip_used], collapse = ", "), call. = FALSE ) } skip_flag_all <- Reduce(`|`, skip_flags_all) skip_tests <- names_all[skip_flag_all] skip_tests } get_run_only_tests <- function(tests, run_only) { names_all <- names(tests) names_all <- names_all[names_all != ""] if (is.null(run_only)) { return(tests) } run_only_flags_all <- map(paste0("(?:^(?:", run_only, ")$)"), grepl, names_all, perl = TRUE) run_only_flag_all <- Reduce(`|`, run_only_flags_all) run_only_tests <- names_all[run_only_flag_all] tests[run_only_tests] } DBItest/R/spec-driver-constructor.R0000644000176200001440000000260414602020561016700 0ustar liggesusers#' spec_driver_constructor #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @section Construction of the DBIDriver object: spec_driver_constructor <- list( constructor = function(ctx) { pkg_name <- package_name(ctx) #' The backend must support creation of an instance of its [DBIDriver-class] subclass #' with a \dfn{constructor function}. 
#' By default, its name is the package name without the leading \sQuote{R} #' (if it exists), e.g., `SQLite` for the \pkg{RSQLite} package. default_constructor_name <- gsub("^R", "", pkg_name) #' However, backend authors may choose a different name. constructor_name <- ctx$tweaks$constructor_name %||% default_constructor_name #' The constructor must be exported, and pkg_env <- getNamespace(pkg_name) eval(bquote( expect_true(.(constructor_name) %in% getNamespaceExports(pkg_env)) )) #' it must be a function eval(bquote( expect_true(exists(.(constructor_name), mode = "function", pkg_env)) )) constructor <- get(constructor_name, mode = "function", pkg_env) #' that is callable without arguments. expect_all_args_have_default_values(constructor) #' DBI recommends to define a constructor with an empty argument list. if (!isTRUE(ctx$tweaks$constructor_relax_args)) { expect_arglist_is_empty(constructor) } }, # NULL ) DBItest/R/spec-sql-remove-table.R0000644000176200001440000001376014602020561016206 0ustar liggesusers#' spec_sql_remove_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_remove_table <- list( remove_table_formals = function() { # expect_equal(names(formals(dbRemoveTable)), c("conn", "name", "...")) }, remove_table_return = function(ctx, con, table_name) { #' @return #' `dbRemoveTable()` returns `TRUE`, invisibly. penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_invisible_true(dbRemoveTable(con, table_name)) }, #' remove_table_missing = function(con, table_name) { #' @section Failure modes: #' If the table does not exist, an error is raised. expect_error(dbRemoveTable(con, table_name)) }, remove_table_closed_connection = function(ctx, con, table_name) { #' An attempt to remove a view with this function may result in an error. 
#' #' #' An error is raised when calling this method for a closed dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_closed_connection(ctx = ctx) expect_error(dbRemoveTable(con2, table_name)) }, remove_table_invalid_connection = function(ctx, con, table_name) { #' or invalid connection. dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_invalid_connection(ctx) expect_error(dbRemoveTable(con2, table_name)) }, remove_table_error = function(con, table_name) { #' An error is also raised dbWriteTable(con, table_name, data.frame(a = 1L)) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbRemoveTable(con, NA)) #' if this results in a non-scalar. expect_error(dbRemoveTable(con, c(table_name, table_name))) }, #' @section Additional arguments: #' The following arguments are not part of the `dbRemoveTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `temporary` (default: `FALSE`) #' - `fail_if_missing` (default: `TRUE`) #' #' These arguments must be provided as named arguments. #' remove_table_temporary_arg = function(ctx, con, table_name) { #' If `temporary` is `TRUE`, the call to `dbRemoveTable()` #' will consider only temporary tables. #' Not all backends support this argument. if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } dbWriteTable(con, table_name, data.frame(a = 1.5)) expect_equal(dbReadTable(con, table_name), data.frame(a = 1.5)) dbCreateTable(con, table_name, data.frame(b = 2.5), temporary = TRUE) dbRemoveTable(con, table_name, temporary = TRUE) #' In particular, permanent tables of the same name are left untouched. expect_error(dbRemoveTable(con, table_name, temporary = TRUE)) expect_equal(dbReadTable(con, table_name), data.frame(a = 1.5)) }, #' remove_table_missing_succeed = function(con, table_name) { #' If `fail_if_missing` is `FALSE`, the call to `dbRemoveTable()` #' succeeds if the table does not exist. 
expect_error(dbRemoveTable(con, table_name, fail_if_missing = FALSE), NA) }, remove_table_list = function(con, table_name) { #' @section Specification: #' A table removed by `dbRemoveTable()` doesn't appear in the list of tables #' returned by [dbListTables()], #' and [dbExistsTable()] returns `FALSE`. dbWriteTable(con, table_name, data.frame(a = 1L)) expect_true(table_name %in% dbListTables(con)) expect_true(dbExistsTable(con, table_name)) dbRemoveTable(con, table_name) expect_false(table_name %in% dbListTables(con)) expect_false(dbExistsTable(con, table_name)) }, remove_table_other_con = function(ctx, con, table_name) { #' The removal propagates immediately to other connections to the same database. con2 <- local_connection(ctx) dbWriteTable(con, table_name, data.frame(a = 1L)) expect_true(table_name %in% dbListTables(con2)) expect_true(dbExistsTable(con2, table_name)) dbRemoveTable(con, table_name) expect_false(table_name %in% dbListTables(con2)) expect_false(dbExistsTable(con2, table_name)) }, remove_table_temporary = function(ctx, con, table_name) { #' This function can also be used to remove a temporary table. 
if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) if (isTRUE(ctx$tweaks$list_temporary_tables)) { expect_true(table_name %in% dbListTables(con)) } expect_true(dbExistsTable(con, table_name)) dbRemoveTable(con, table_name) if (isTRUE(ctx$tweaks$list_temporary_tables)) { expect_false(table_name %in% dbListTables(con)) } expect_false(dbExistsTable(con, table_name)) }, #' remove_table_name = function(ctx, con) { #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } test_in <- data.frame(a = 1L) for (table_name in table_names) { local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbRemoveTable()` will do the #' quoting, dbWriteTable(con, table_name, test_in) expect_true(dbRemoveTable(con, table_name)) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, remove_table_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } test_in <- data.frame(a = 1L) for (table_name in table_names) { local_remove_test_table(con, table_name) dbWriteTable(con, table_name, test_in) expect_true(dbRemoveTable(con, dbQuoteIdentifier(con, table_name))) } }, # NULL ) DBItest/R/spec-connection-data-type.R0000644000176200001440000000020014602017371017042 0ustar liggesusersspec_connection_data_type <- list( data_type_connection = function(ctx, con) { test_data_type(ctx, con) }, # NULL ) DBItest/R/tweaks.R0000644000176200001440000001436514602061640013402 0ustar liggesusers#' Tweaks for DBI tests #' #' The tweaks are a way to control the behavior of certain 
tests. Currently, #' you need to search the \pkg{DBItest} source code to understand which tests #' are affected by which tweaks. This function is usually called to set the #' `tweaks` argument in a [make_context()] call. #' #' @name tweaks #' @aliases NULL #' @examples #' \dontrun{ #' make_context(..., tweaks = tweaks(strict_identifier = TRUE)) #' } { # nolint tweak_names <- alist( #' @param ... `[any]`\cr #' Unknown tweaks are accepted, with a warning. The ellipsis #' also makes sure that you only can pass named arguments. "..." = , #' @param constructor_name `[character(1)]`\cr #' Name of the function that constructs the `Driver` object. "constructor_name" = NULL, #' @param constructor_relax_args `[logical(1)]`\cr #' If `TRUE`, allow a driver constructor with default values for all #' arguments; otherwise, require a constructor with empty argument list #' (default). "constructor_relax_args" = FALSE, #' @param strict_identifier `[logical(1)]`\cr #' Set to `TRUE` if the DBMS does not support arbitrarily-named #' identifiers even when quoting is used. "strict_identifier" = FALSE, #' @param omit_blob_tests `[logical(1)]`\cr #' Set to `TRUE` if the DBMS does not support a `BLOB` data #' type. "omit_blob_tests" = FALSE, #' @param current_needs_parens `[logical(1)]`\cr #' Set to `TRUE` if the SQL functions `current_date`, #' `current_time`, and `current_timestamp` require parentheses. "current_needs_parens" = FALSE, #' @param union `[function(character)]`\cr #' Function that combines several subqueries into one so that the #' resulting query returns the concatenated results of the subqueries "union" = function(x) paste(x, collapse = " UNION "), #' @param placeholder_pattern `[character]`\cr #' A pattern for placeholders used in [dbBind()], e.g., #' `"?"`, `"$1"`, or `":name"`. See #' [make_placeholder_fun()] for details. 
"placeholder_pattern" = NULL, #' @param logical_return `[function(logical)]`\cr #' A vectorized function that converts logical values to the data type #' returned by the DBI backend. "logical_return" = identity, #' @param date_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a date value. "date_cast" = function(x) paste0("date('", x, "')"), #' @param time_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a time value. "time_cast" = function(x) paste0("time('", x, "')"), #' @param timestamp_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a timestamp value. "timestamp_cast" = function(x) paste0("timestamp('", x, "')"), #' @param blob_cast `[function(character)]`\cr #' A vectorized function that creates an SQL expression for coercing a #' string to a blob value. "blob_cast" = identity, #' @param date_typed `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support a dedicated type for dates. "date_typed" = TRUE, #' @param time_typed `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support a dedicated type for times. "time_typed" = TRUE, #' @param timestamp_typed `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support a dedicated type for #' timestamps. "timestamp_typed" = TRUE, #' @param temporary_tables `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support temporary tables. "temporary_tables" = TRUE, #' @param list_temporary_tables `[logical(1L)]`\cr #' Set to `FALSE` if the DBMS doesn't support listing temporary tables. "list_temporary_tables" = TRUE, #' @param allow_na_rows_affected `[logical(1L)]`\cr #' Set to `TRUE` to allow [dbGetRowsAffected()] to return `NA`. 
"allow_na_rows_affected" = FALSE, #' @param is_null_check `[function(character)]`\cr #' A vectorized function that creates an SQL expression for checking if a #' value is `NULL`. "is_null_check" = function(x) paste0("(", x, " IS NULL)"), #' @param create_table_as `[function(character(1), character(1))]`\cr #' A function that creates an SQL expression for creating a table #' from an SQL expression. "create_table_as" = function(table_name, query) paste0("CREATE TABLE ", table_name, " AS ", query), #' @param dbitest_version `[character(1)]`\cr #' Compatible DBItest version, default: "1.7.1". "dbitest_version" = "1.7.1", # Dummy argument NULL ) } # A helper function that constructs the tweaks() function in a DRY fashion. make_tweaks <- function(envir = parent.frame()) { fmls <- tweak_names[-length(tweak_names)] tweak_quoted <- map(setNames(nm = names(fmls)), as.name) tweak_quoted <- c(tweak_quoted) list_call <- as.call(c(quote(list), tweak_quoted[-1])) fun <- eval(bquote( function() { unknown <- list(...) if (length(unknown) > 0) { if (is.null(names(unknown)) || any(names(unknown) == "")) { warning("All tweaks must be named", call. = FALSE) } else { warning("Unknown tweaks: ", paste(names(unknown), collapse = ", "), call. = FALSE ) } } ret <- .(list_call) ret <- compact(ret) structure(ret, class = "DBItest_tweaks") }, as.environment(list(list_call = list_call)) )) formals(fun) <- fmls environment(fun) <- envir fun } #' @export #' @rdname tweaks tweaks <- make_tweaks() #' @export format.DBItest_tweaks <- function(x, ...) { if (length(x) == 0L) { return("DBItest tweaks: Empty") } c( "DBItest tweaks:", unlist(mapply( function(name, value) { paste0(" ", name, ": ", format(value)[[1]]) }, names(x), unclass(x) )) ) } #' @export print.DBItest_tweaks <- function(x, ...) { cat(format(x), sep = "\n") } #' @export `$.DBItest_tweaks` <- function(x, tweak) { if (!(tweak %in% names(tweak_names))) { stop("Tweak not found: ", tweak, call. 
= FALSE) } NextMethod() } DBItest/R/spec-sql-list-tables.R0000644000176200001440000000507414602020561016046 0ustar liggesusers#' spec_sql_list_tables #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_list_tables <- list( list_tables_formals = function() { # expect_equal(names(formals(dbListTables)), c("conn", "...")) }, list_tables_1 = function(ctx, con, table_name = "dbit07") { #' @return #' `dbListTables()` tables <- dbListTables(con) #' returns a character vector expect_type(tables, "character") #' that enumerates all tables expect_false(table_name %in% tables) #' and views # TODO #' in the database. #' Tables added with [dbWriteTable()] are penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) #' part of the list. tables <- dbListTables(con) expect_true(table_name %in% tables) }, # second stage list_tables_2 = function(ctx, con) { # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit07" #' As soon a table is removed from the database, #' it is also removed from the list of database tables. tables <- dbListTables(con) expect_false(table_name %in% tables) }, #' list_tables_temporary = function(ctx, con, table_name) { #' The same applies to temporary tables if supported by the database. if (isTRUE(ctx$tweaks$temporary_tables) && isTRUE(ctx$tweaks$list_temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) tables <- dbListTables(con) expect_true(table_name %in% tables) } }, #' list_tables_quote = function(ctx, con) { #' The returned names are suitable for quoting with `dbQuoteIdentifier()`. 
if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) dbWriteTable(con, dbQuoteIdentifier(con, table_name), data.frame(a = 2L)) tables <- dbListTables(con) expect_true(table_name %in% tables) expect_true(dbQuoteIdentifier(con, table_name) %in% dbQuoteIdentifier(con, tables)) } }, #' list_tables_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when calling this method for a closed expect_error(dbListTables(closed_con)) }, list_tables_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbListTables(invalid_con)) }, # NULL ) DBItest/R/spec-.R0000644000176200001440000000563414602017371013114 0ustar liggesusers# reverse order # Script to create new spec files from subspec names read from clipboard: # pbpaste | gsed 's/,//' | for i in $(cat); do f=$(echo $i | gsed 's/_/-/g;s/$/.R/'); echo "$i <- list(" > R/$f; echo ")" >> R/$f; echo "#' @include $f"; done | tac | pbcopy # # Example input: # test_xxx_1, # test_xxx_2, # # Output: Files R/test-xxx-1.R and R/test-xxx-2.R, and @include directives to stdout ##### All #' @include spec-all.R ##### Stress #' @include spec-stress.R #' @include spec-stress-connection.R ##### Aggregators #' @include spec-compliance.R #' @include spec-transaction.R #' @include spec-arrow.R #' @include spec-meta.R #' @include spec-sql.R #' @include spec-result.R #' @include spec-connection.R #' @include spec-driver.R ##### Later #' @include spec-meta-get-info-result.R #' @include spec-meta-column-info.R #' @include spec-sql-list-fields.R #' @include spec-connection-get-info.R #' @include spec-driver-get-info.R ##### Arrow #' @include spec-arrow-roundtrip.R #' @include spec-arrow-bind.R #' @include spec-arrow-append-table-arrow.R #' @include spec-arrow-create-table-arrow.R #' @include spec-arrow-write-table-arrow.R #' 
@include spec-arrow-read-table-arrow.R #' @include spec-arrow-get-query-arrow.R #' @include spec-arrow-fetch-arrow-chunk.R #' @include spec-arrow-fetch-arrow.R #' @include spec-arrow-send-query-arrow.R ##### Method specs #' @include spec-transaction-with-transaction.R #' @include spec-transaction-begin-commit-rollback.R #' @include spec-meta-get-rows-affected.R #' @include spec-meta-get-row-count.R #' @include spec-meta-get-statement.R #' @include spec-meta-has-completed.R #' @include spec-meta-is-valid.R #' @include spec-meta-bind-.R #' @include spec-meta-bind-arrow-stream.R #' @include spec-meta-bind-stream.R #' @include spec-meta-bind-arrow.R #' @include spec-meta-bind.R #' @include spec-meta-bind-expr.R #' @include spec-meta-bind-formals.R #' @include spec-meta-bind-runner.R #' @include spec-sql-list-objects.R #' @include spec-sql-remove-table.R #' @include spec-sql-exists-table.R #' @include spec-sql-list-tables.R #' @include spec-sql-write-table.R #' @include spec-sql-append-table.R #' @include spec-sql-create-table.R #' @include spec-sql-read-table.R #' @include spec-sql-unquote-identifier.R #' @include spec-sql-quote-identifier.R #' @include spec-sql-quote-literal.R #' @include spec-sql-quote-string.R #' @include spec-result-execute.R #' @include spec-result-send-statement.R #' @include spec-result-get-query.R #' @include spec-result-clear-result.R #' @include spec-result-roundtrip.R #' @include spec-result-fetch.R #' @include spec-result-send-query.R #' @include spec-connection-disconnect.R #' @include spec-driver-connect.R #' @include spec-result-create-table-with-data-type.R #' @include spec-connection-data-type.R #' @include spec-driver-data-type.R ##### Soft specs #' @include spec-driver-constructor.R #' @include spec-compliance-methods.R #' @include spec-getting-started.R NULL DBItest/R/compat-purrr.R0000644000176200001440000001232214602017411014523 0ustar liggesusers# nocov start - compat-purrr.R # Latest version: 
https://github.com/r-lib/rlang/blob/main/R/compat-purrr.R # This file provides a minimal shim to provide a purrr-like API on top of # base R functions. They are not drop-in replacements but allow a similar style # of programming. # # Changelog: # # 2022-06-07: # * `transpose()` is now more consistent with purrr when inner names # are not congruent (#1346). # # 2021-12-15: # * `transpose()` now supports empty lists. # # 2021-05-21: # * Fixed "object `x` not found" error in `imap()` (@mgirlich) # # 2020-04-14: # * Removed `pluck*()` functions # * Removed `*_cpl()` functions # * Used `as_function()` to allow use of `~` # * Used `.` prefix for helpers map <- function(.x, .f, ...) { .f <- as_function(.f, env = global_env()) lapply(.x, .f, ...) } walk <- function(.x, .f, ...) { map(.x, .f, ...) invisible(.x) } map_lgl <- function(.x, .f, ...) { .rlang_purrr_map_mold(.x, .f, logical(1), ...) } map_int <- function(.x, .f, ...) { .rlang_purrr_map_mold(.x, .f, integer(1), ...) } map_dbl <- function(.x, .f, ...) { .rlang_purrr_map_mold(.x, .f, double(1), ...) } map_chr <- function(.x, .f, ...) { .rlang_purrr_map_mold(.x, .f, character(1), ...) } .rlang_purrr_map_mold <- function(.x, .f, .mold, ...) { .f <- as_function(.f, env = global_env()) out <- vapply(.x, .f, .mold, ..., USE.NAMES = FALSE) names(out) <- names(.x) out } map2 <- function(.x, .y, .f, ...) { .f <- as_function(.f, env = global_env()) out <- mapply(.f, .x, .y, MoreArgs = list(...), SIMPLIFY = FALSE) if (length(out) == length(.x)) { set_names(out, names(.x)) } else { set_names(out, NULL) } } map2_lgl <- function(.x, .y, .f, ...) { as.vector(map2(.x, .y, .f, ...), "logical") } map2_int <- function(.x, .y, .f, ...) { as.vector(map2(.x, .y, .f, ...), "integer") } map2_dbl <- function(.x, .y, .f, ...) { as.vector(map2(.x, .y, .f, ...), "double") } map2_chr <- function(.x, .y, .f, ...) { as.vector(map2(.x, .y, .f, ...), "character") } imap <- function(.x, .f, ...) { map2(.x, names(.x) %||% seq_along(.x), .f, ...) 
} pmap <- function(.l, .f, ...) { .f <- as.function(.f) args <- .rlang_purrr_args_recycle(.l) do.call("mapply", c( FUN = list(quote(.f)), args, MoreArgs = quote(list(...)), SIMPLIFY = FALSE, USE.NAMES = FALSE )) } .rlang_purrr_args_recycle <- function(args) { lengths <- map_int(args, length) n <- max(lengths) stopifnot(all(lengths == 1L | lengths == n)) to_recycle <- lengths == 1L args[to_recycle] <- map(args[to_recycle], function(x) rep.int(x, n)) args } keep <- function(.x, .f, ...) { .x[.rlang_purrr_probe(.x, .f, ...)] } discard <- function(.x, .p, ...) { sel <- .rlang_purrr_probe(.x, .p, ...) .x[is.na(sel) | !sel] } map_if <- function(.x, .p, .f, ...) { matches <- .rlang_purrr_probe(.x, .p) .x[matches] <- map(.x[matches], .f, ...) .x } .rlang_purrr_probe <- function(.x, .p, ...) { if (is_logical(.p)) { stopifnot(length(.p) == length(.x)) .p } else { .p <- as_function(.p, env = global_env()) map_lgl(.x, .p, ...) } } compact <- function(.x) { Filter(length, .x) } transpose <- function(.l) { if (!length(.l)) { return(.l) } inner_names <- names(.l[[1]]) if (is.null(inner_names)) { fields <- seq_along(.l[[1]]) } else { fields <- set_names(inner_names) .l <- map(.l, function(x) { if (is.null(names(x))) { set_names(x, inner_names) } else { x } }) } # This way missing fields are subsetted as `NULL` instead of causing # an error .l <- map(.l, as.list) map(fields, function(i) { map(.l, .subset2, i) }) } every <- function(.x, .p, ...) { .p <- as_function(.p, env = global_env()) for (i in seq_along(.x)) { if (!rlang::is_true(.p(.x[[i]], ...))) return(FALSE) } TRUE } some <- function(.x, .p, ...) { .p <- as_function(.p, env = global_env()) for (i in seq_along(.x)) { if (rlang::is_true(.p(.x[[i]], ...))) return(TRUE) } FALSE } negate <- function(.p) { .p <- as_function(.p, env = global_env()) function(...) !.p(...) } reduce <- function(.x, .f, ..., .init) { f <- function(x, y) .f(x, y, ...) 
Reduce(f, .x, init = .init) } reduce_right <- function(.x, .f, ..., .init) { f <- function(x, y) .f(y, x, ...) Reduce(f, .x, init = .init, right = TRUE) } accumulate <- function(.x, .f, ..., .init) { f <- function(x, y) .f(x, y, ...) Reduce(f, .x, init = .init, accumulate = TRUE) } accumulate_right <- function(.x, .f, ..., .init) { f <- function(x, y) .f(y, x, ...) Reduce(f, .x, init = .init, right = TRUE, accumulate = TRUE) } detect <- function(.x, .f, ..., .right = FALSE, .p = is_true) { .p <- as_function(.p, env = global_env()) .f <- as_function(.f, env = global_env()) for (i in .rlang_purrr_index(.x, .right)) { if (.p(.f(.x[[i]], ...))) { return(.x[[i]]) } } NULL } detect_index <- function(.x, .f, ..., .right = FALSE, .p = is_true) { .p <- as_function(.p, env = global_env()) .f <- as_function(.f, env = global_env()) for (i in .rlang_purrr_index(.x, .right)) { if (.p(.f(.x[[i]], ...))) { return(i) } } 0L } .rlang_purrr_index <- function(x, right = FALSE) { idx <- seq_along(x) if (right) { idx <- rev(idx) } idx } # nocov end DBItest/R/spec-driver-data-type.R0000644000176200001440000000666514602061640016221 0ustar liggesusers#' spec_driver_data_type #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @inherit test_data_type spec_driver_data_type <- list( data_type_formals = function() { # expect_equal(names(formals(dbDataType)), c("dbObj", "obj", "...")) }, # data_type_driver = function(ctx) { test_data_type(ctx, ctx$drv) }, # NULL ) #' test_data_type #' @param ctx,dbObj Arguments to internal test function #' @keywords internal test_data_type <- function(ctx, dbObj) { #' @return #' `dbDataType()` returns the SQL type that corresponds to the `obj` argument check_data_type <- function(value) { eval(bquote({ #' as a non-empty expect_match(dbDataType(dbObj, .(value)), ".") #' character string. 
if (!is.data.frame(value)) { expect_equal(length(dbDataType(dbObj, .(value))), 1L) } else { #' For data frames, a character vector with one element per column #' is returned. expect_equal(length(dbDataType(dbObj, value)), .(ncol(value))) } expect_type(dbDataType(dbObj, .(value)), "character") expect_visible(dbDataType(dbObj, .(value))) })) } #' #' @section Failure modes: #' An error is raised for invalid values for the `obj` argument such as a #' `NULL` value. expect_error(dbDataType(dbObj, NULL)) #' @section Specification: #' The backend can override the [dbDataType()] generic #' for its driver class. #' #' This generic expects an arbitrary object as second argument. #' To query the values returned by the default implementation, #' run `example(dbDataType, package = "DBI")`. #' If the backend needs to override this generic, #' it must accept all basic R data types as its second argument, namely expect_has_data_type <- function(value) { eval(bquote( expect_error(check_data_type(.(value)), NA) )) } expected_data_types <- list( #' [logical], logical(1), #' [integer], integer(1), #' [numeric], numeric(1), #' [character], character(1), #' dates (see [Dates]), Sys.Date(), #' date-time (see [DateTimeClasses]), Sys.time(), #' and [difftime]. Sys.time() - Sys.time(), #' If the database supports blobs, if (!isTRUE(ctx$tweaks$omit_blob_tests)) { #' this method also must accept lists of [raw] vectors, list(as.raw(0:10)) }, if (!isTRUE(ctx$tweaks$omit_blob_tests)) { #' and [blob::blob] objects. blob::blob(as.raw(0:10)) } ) map( compact(expected_data_types), expect_has_data_type ) expect_has_data_type(data.frame(a = 1, b = "2", stringsAsFactors = FALSE)) #' As-is objects (i.e., wrapped by [I()]) must be #' supported and return the same results as their unwrapped counterparts. 
map( compact(expected_data_types), function(value) { if (!is.null(value)) { eval(bquote( expect_error( expect_identical( dbDataType(dbObj, I(.(value))), dbDataType(dbObj, .(value)) ), NA ) )) } } ) #' The SQL data type for [factor] and expect_identical( dbDataType(dbObj, letters), dbDataType(dbObj, factor(letters)) ) #' [ordered] is the same as for character. expect_identical( dbDataType(dbObj, letters), dbDataType(dbObj, ordered(letters)) ) #' The behavior for other object types is not specified. } DBItest/R/spec-stress.R0000644000176200001440000000007514602017371014352 0ustar liggesusers#' @format NULL spec_stress <- c( spec_stress_connection ) DBItest/R/DBItest.R0000644000176200001440000000057014602017371013375 0ustar liggesusers#' @details #' The two most important functions are [make_context()] and #' [test_all()]. The former tells the package how to connect to your #' DBI backend, the latter executes all tests of the test suite. More #' fine-grained test functions (all with prefix `test_`) are available. #' #' See the package's vignette for more details. #' #' @author Kirill Müller "_PACKAGE" DBItest/R/spec-sql-create-table.R0000644000176200001440000002320214602020561016144 0ustar liggesusers#' spec_sql_create_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_create_table <- list( create_table_formals = function() { # expect_equal(names(formals(dbCreateTable)), c("conn", "name", "fields", "...", "row.names", "temporary")) }, create_table_return = function(con, table_name) { #' @return #' `dbCreateTable()` returns `TRUE`, invisibly. expect_invisible_true(dbCreateTable(con, table_name, trivial_df())) }, #' create_table_overwrite = function(con, table_name) { #' @section Failure modes: #' If the table exists, an error is raised; the remote table remains unchanged. 
test_in <- trivial_df() dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) expect_error(dbCreateTable(con, table_name, data.frame(b = 1L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' create_table_closed_connection = function(ctx, closed_con) { #' An error is raised when calling this method for a closed expect_error(dbCreateTable(closed_con, "test", data.frame(a = 1))) }, create_table_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbCreateTable(invalid_con, "test", data.frame(a = 1))) }, create_table_error = function(ctx, con, table_name) { #' An error is also raised test_in <- data.frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbCreateTable(con, NA, test_in)) #' if this results in a non-scalar. expect_error(dbCreateTable(con, c(table_name, table_name), test_in)) #' Invalid values for the `row.names` and `temporary` arguments #' (non-scalars, expect_error(dbCreateTable(con, table_name, test_in, row.names = letters)) expect_error(dbCreateTable(con, table_name, test_in, temporary = c(TRUE, FALSE))) #' unsupported data types, expect_error(dbCreateTable(con, table_name, test_in, row.names = list(1L))) expect_error(dbCreateTable(con, table_name, fields = 1L)) expect_error(dbCreateTable(con, table_name, test_in, temporary = 1L)) #' `NA`, expect_error(dbCreateTable(con, table_name, test_in, row.names = NA)) expect_error(dbCreateTable(con, table_name, fields = NA)) expect_error(dbCreateTable(con, table_name, test_in, temporary = NA)) #' incompatible values, expect_error(dbCreateTable(con, table_name, test_in, fields = letters)) #' duplicate names) expect_error(dbCreateTable(con, table_name, fields = c(a = "INTEGER", a = "INTEGER"))) #' also raise an error. 
}, #' @section Additional arguments: #' The following arguments are not part of the `dbCreateTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `temporary` (default: `FALSE`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. create_table_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbCreateTable()` will do the quoting, dbCreateTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in[0, , drop = FALSE]) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, create_table_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) dbCreateTable(con, dbQuoteIdentifier(con, table_name), test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in[0, , drop = FALSE]) } }, create_table_value_df = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.9") #' #' The `value` argument can be: #' - a data frame, table_name <- "ct_df" local_remove_test_table(con, table_name) df <- data.frame(a = 1) dbCreateTable(con, table_name, df) expect_equal_df(dbReadTable(con, table_name), data.frame(a = numeric())) }, 
create_table_value_array = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.10") #' - a named list of SQL types table_name <- "ct_array" local_remove_test_table(con, table_name) array <- list(a = "NUMERIC") dbCreateTable(con, table_name, array) expect_equal_df(dbReadTable(con, table_name), data.frame(a = numeric())) }, #' create_table_temporary_1 = function(ctx, con, table_name = "dbit03") { #' If the `temporary` argument is `TRUE`, the table is not available in a #' second connection and is gone after reconnecting. #' Not all backends support this argument. if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } penguins <- get_penguins(ctx) dbCreateTable(con, table_name, penguins, temporary = TRUE) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[0, , drop = FALSE]) con2 <- local_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, # second stage create_table_temporary_2 = function(ctx, con) { if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit03" expect_error(dbReadTable(con, table_name)) }, create_table_visible_in_other_connection_1 = function(ctx, local_con) { #' A regular, non-temporary table is visible in a second connection, penguins <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit04" dbCreateTable(local_con, table_name, penguins) penguins_out <- check_df(dbReadTable(local_con, table_name)) expect_equal_df(penguins_out, penguins[0, , drop = FALSE]) con2 <- local_connection(ctx) expect_equal_df(dbReadTable(con2, table_name), penguins[0, , drop = FALSE]) }, # second stage create_table_visible_in_other_connection_2 = function(ctx, con) { penguins <- get_penguins(ctx) # table_name not in formals on purpose: this means that this 
table won't be # removed at the end of the test table_name <- "dbit04" #' in a pre-existing connection, expect_equal_df(check_df(dbReadTable(con, table_name)), penguins[0, , drop = FALSE]) }, # third stage create_table_visible_in_other_connection_3 = function(ctx, local_con, table_name = "dbit04") { penguins <- get_penguins(ctx) #' and after reconnecting to the database. expect_equal_df(check_df(dbReadTable(local_con, table_name)), penguins[0, , drop = FALSE]) }, #' create_roundtrip_keywords = function(ctx, con) { #' SQL keywords can be used freely in table names, column names, and data. tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in, name = "exists", use_append = TRUE) }, create_roundtrip_quotes = function(ctx, con) { #' Quotes, commas, and spaces can also be used for table names and column names, #' if the database supports non-syntactic identifiers. if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "," ) for (table_name in table_names) { tbl_in <- trivial_df(4, table_names) test_table_roundtrip(con, tbl_in, use_append = TRUE) } }, #' create_table_row_names_default = function(ctx, con, table_name) { #' The `row.names` argument must be missing mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)[0, , drop = FALSE]) }, create_table_row_names_null = function(ctx, con, table_name) { #' or `NULL`, the default value. 
mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in, row.names = NULL) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = NULL)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)[0, , drop = FALSE]) }, # create_table_row_names_non_null = function(ctx, con, table_name) { #' All other values for the `row.names` argument mtcars_in <- datasets::mtcars #' (in particular `TRUE`, expect_error(dbCreateTable(con, table_name, mtcars_in, row.names = TRUE)) #' `NA`, expect_error(dbCreateTable(con, table_name, mtcars_in, row.names = NA)) #' and a string) expect_error(dbCreateTable(con, table_name, mtcars_in, row.names = "make_model")) #' raise an error. }, # NULL ) DBItest/R/utf8.R0000644000176200001440000000053114602017371012762 0ustar liggesuserstext_cyrillic <- "\u041a\u0438\u0440\u0438\u043b\u043b" text_latin <- "M\u00fcller" text_latin_encoded <- iconv(text_latin, from = "UTF-8", to = "latin1") text_chinese <- "\u6211\u662f\u8c01" text_ascii <- iconv("ASCII", to = "ASCII") get_texts <- function() { c(text_cyrillic, text_latin, text_latin_encoded, text_chinese, text_ascii) } DBItest/R/s4.R0000644000176200001440000000234514602017371012427 0ustar liggesusers# http://stackoverflow.com/a/39880324/946850 s4_methods <- function(env, pkg_fun = NULL) { generics <- methods::getGenerics(env) if (is.null(pkg_fun)) { ok <- TRUE } else { ok <- pkg_fun(generics@package) } res <- Map( generics@.Data[ok], generics@package[ok], USE.NAMES = TRUE, f = function(name, package) { what <- methods::methodsPackageMetaName("T", paste(name, package, sep = ":")) table <- get(what, envir = env) mget(ls(table, all.names = TRUE), envir = table) } ) unlist(res, recursive = FALSE) } s4_real_argument_names <- function(s4_method) { expect_s4_class(s4_method, "function") expect_s4_class(s4_method, "MethodDefinition") unwrapped <- s4_unwrap(s4_method) names(formals(unwrapped)) } s4_unwrap <- function(s4_method) { # Only unwrap if 
body is of the following form: # { # .local <- function(x, y, z, ...) { # ... # } # ... # } method_body <- body(s4_method) if (inherits(method_body, "{")) { local_def <- method_body[[2]] if (inherits(local_def, "<-") && local_def[[2]] == quote(.local)) { local_fun <- local_def[[3]] if (inherits(local_fun, "function")) { return(local_fun) } } } s4_method } DBItest/R/zzz.R0000644000176200001440000000213314602017371012731 0ustar liggesusers.onLoad <- function(libname, pkgname) { if (is_installed("debugme")) { # Necessary to re-parse environment variable get(".onLoad", asNamespace("debugme"))(libname, pkgname) debugme::debugme() } debug_info() } activate_debugme <- function(bangs = 2) { old_debugme <- remove_from_logging(get_debugme()) old_debugme <- gsub("(.)$", "\\1,", old_debugme) my_debugme <- paste0(strrep("!", bangs), get_pkgname()) set_debugme(paste0(old_debugme, my_debugme)) } deactivate_debugme <- function() { new_debugme <- remove_from_logging(get_debugme()) set_debugme(new_debugme) } get_debugme <- function() { Sys.getenv("DEBUGME") } set_debugme <- function(debugme) { Sys.setenv("DEBUGME" = debugme) message("DEBUGME=", debugme) } remove_from_logging <- function(spec) { spec <- gsub(paste0("!*", get_pkgname(), ""), "", spec) spec <- gsub(",,+", ",", spec) spec } debug_info <- function(pkgname) { "!DEBUG `get_pkgname()` loaded" "!!DEBUG Two bangs" "!!!DEBUG Three bangs" "!!!!DEBUG Four bangs" } get_pkgname <- function() { environmentName(topenv(environment())) } DBItest/R/test-compliance.R0000644000176200001440000000064114602017371015165 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_compliance()]: #' Test full compliance to DBI NULL #' Test full compliance to DBI #' #' @inheritParams test_all #' @include test-arrow.R #' @family tests #' @export test_compliance <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Full compliance" run_tests(ctx, spec_compliance, skip, run_only, test_suite) } 
DBItest/R/spec-result-execute.R0000644000176200001440000000604314602017371016006 0ustar liggesusers#' spec_result_execute #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_execute <- list( execute_formals = function() { # expect_equal(names(formals(dbExecute)), c("conn", "statement", "...")) }, execute_atomic = function(ctx, con, table_name) { #' @return #' `dbExecute()` always returns a query <- trivial_statement(ctx, table_name) ret <- dbExecute(con, query) #' scalar expect_equal(length(ret), 1) #' numeric expect_true(is.numeric(ret)) #' that specifies the number of rows affected #' by the statement. }, #' execute_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when issuing a statement over a closed table_name <- "dbit12" expect_error(dbExecute(closed_con, trivial_statement(ctx, table_name = table_name))) }, execute_invalid_connection = function(ctx, invalid_con) { #' or invalid connection, table_name <- "dbit13" expect_error(dbExecute(invalid_con, trivial_statement(ctx, table_name = table_name))) }, execute_syntax_error = function(con) { #' if the syntax of the statement is invalid, expect_error(dbExecute(con, "CREATTE")) }, execute_non_string = function(con) { #' or if the statement is not a non-`NA` string. expect_error(dbExecute(con, character())) expect_error(dbExecute(con, letters)) expect_error(dbExecute(con, NA_character_)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbExecute()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" sections for details on their usage. execute_params = function(ctx, con) { #' @section Specification: #' #' The `param` argument allows passing query parameters, see [dbBind()] for details. 
placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { table_name <- random_table_name() local_remove_test_table(con, table_name) dbWriteTable(con, table_name, data.frame(a = as.numeric(1:3))) placeholder <- placeholder_fun(1) query <- paste0("DELETE FROM ", table_name, " WHERE a > ", placeholder) values <- 1.5 params <- stats::setNames(list(values), names(placeholder)) rc <- dbExecute(con, query, params = params) if (isTRUE(ctx$tweaks$allow_na_rows_affected)) { expect_true((is.na(rc) && is.numeric(rc)) || rc == 2L, info = placeholder) } else { expect_equal(rc, 2L, info = placeholder) } } }, execute_immediate = function(ctx, con, table_name) { #' @inheritSection spec_result_get_query Specification for the `immediate` argument res <- expect_visible(dbExecute(con, trivial_statement(ctx, table_name), immediate = TRUE)) expect_true(is.numeric(res)) }, # NULL ) DBItest/R/test-connection.R0000644000176200001440000000073714602017371015220 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_connection()]: #' Test the "Connection" class NULL #' Test the "Connection" class #' #' @inheritParams test_all #' @include test-driver.R #' @family tests #' @importFrom withr with_temp_libpaths #' @importFrom methods is #' @export test_connection <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Connection" run_tests(ctx, spec_connection, skip, run_only, test_suite) } DBItest/R/spec-result.R0000644000176200001440000000272114602017371014345 0ustar liggesusers#' @format NULL spec_result <- c( spec_result_send_query, spec_result_fetch, spec_result_clear_result, spec_result_get_query, spec_result_send_statement, spec_result_execute, spec_result_create_table_with_data_type, spec_result_roundtrip, # NULL ) # Helpers ----------------------------------------------------------------- sql_union <- function(..., .order_by = NULL, .ctx) { queries <- c(...) 
if (length(queries) == 1) { query <- queries } else { stopifnot(!is.null(.ctx)) query <- .ctx$tweaks$union(queries) } if (!is.null(.order_by)) { query <- paste0(query, " ORDER BY ", .order_by) } query } trivial_statement <- function(ctx, table_name) { ctx$tweaks$create_table_as(table_name, trivial_query()) } trivial_query <- function(n = 1L, column = "a", .order_by = NULL, .ctx = NULL) { # Zero-row queries are hard-coded, search for 1 = 0 stopifnot(n > 0) value <- trivial_values(n) if (length(column) == n) { query <- paste0("SELECT ", paste0(value, " AS ", column, collapse = ", ")) } else { query <- sql_union( paste0("SELECT ", value, " AS ", column), .order_by = .order_by, .ctx = .ctx ) } query } trivial_values <- function(n = 1L) { seq_len(n) + 0.5 } trivial_df <- function(n = 1L, column = "a") { values <- trivial_values(n) if (length(column) == 1) { df <- data.frame(a = values) } else { df <- as.data.frame(as.list(values)) } names(df) <- column df } DBItest/R/import-dbi.R0000644000176200001440000000002414602017371014137 0ustar liggesusers#' @import DBI NULL DBItest/R/spec-meta-get-statement.R0000644000176200001440000000233014602020561016523 0ustar liggesusers#' spec_meta_get_statement #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_get_statement <- list( get_statement_formals = function() { # expect_equal(names(formals(dbGetStatement)), c("res", "...")) }, get_statement_query = function(con) { #' @return #' `dbGetStatement()` returns a string, the query used in query <- trivial_query() #' either [dbSendQuery()] or res <- local_result(dbSendQuery(con, query)) s <- dbGetStatement(res) expect_type(s, "character") expect_identical(s, query) }, # get_statement_statement = function(con, table_name) { query <- paste0("CREATE TABLE ", table_name, " (a integer)") #' [dbSendStatement()]. 
res <- local_result(dbSendStatement(con, query)) s <- dbGetStatement(res) expect_type(s, "character") expect_identical(s, query) }, #' get_statement_error = function(con) { #' @section Failure modes: res <- dbSendQuery(con, trivial_query()) dbClearResult(res) #' Attempting to query the statement for a result set cleared with #' [dbClearResult()] gives an error. expect_error(dbGetStatement(res)) }, # NULL ) DBItest/R/spec-transaction.R0000644000176200001440000000020314602017371015345 0ustar liggesusers#' @format NULL spec_transaction <- c( spec_transaction_begin_commit_rollback, spec_transaction_with_transaction, # NULL ) DBItest/R/spec-connection-disconnect.R0000644000176200001440000000212214602214416017307 0ustar liggesusers#' spec_connection_disconnect #' @family connection specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_connection_disconnect <- list( disconnect_formals = function() { # expect_equal(names(formals(dbDisconnect)), c("conn", "...")) }, can_disconnect = function(ctx) { #' @return con <- connect(ctx) #' `dbDisconnect()` returns `TRUE`, invisibly. expect_invisible_true(dbDisconnect(con)) }, #' #' @section Failure modes: #' A warning is issued on garbage collection when a connection has been #' released without calling `dbDisconnect()`, #' but this cannot be tested automatically. disconnect_closed_connection = function(ctx, closed_con) { #' At least one warning is issued immediately when calling `dbDisconnect()` on an #' already disconnected suppressWarnings(expect_warning(dbDisconnect(closed_con))) }, disconnect_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. 
suppressWarnings(expect_warning(dbDisconnect(invalid_con))) }, # NULL ) DBItest/R/spec-arrow-get-query-arrow.R0000644000176200001440000001332414602020561017225 0ustar liggesusers#' spec_arrow_get_query_arrow #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_get_query_arrow <- list( arrow_get_query_arrow_formals = function() { # expect_equal(names(formals(dbGetQueryArrow)), c("conn", "statement", "...")) }, arrow_get_query_arrow_atomic = function(con) { #' @return #' `dbGetQueryArrow()` always returns an object coercible to a [data.frame], with #' as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value query <- trivial_query() rows <- check_arrow(dbGetQueryArrow(con, query)) expect_equal(rows, data.frame(a = 1.5)) }, arrow_get_query_arrow_one_row = function(con) { #' or has one query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) rows <- check_arrow(dbGetQueryArrow(con, query)) expect_identical(rows, result) }, arrow_get_query_arrow_zero_rows = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.12") #' or zero rows. # Not all SQL dialects seem to support the query used here. 
query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" rows <- check_arrow(dbGetQueryArrow(con, query)) expect_identical(names(rows), letters[1:3]) expect_identical(dim(rows), c(0L, 3L)) }, #' arrow_get_query_arrow_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when issuing a query over a closed expect_error(dbGetQueryArrow(closed_con, trivial_query())) }, arrow_get_query_arrow_invalid_connection = function(ctx, invalid_con) { #' or invalid connection, expect_error(dbGetQueryArrow(invalid_con, trivial_query())) }, arrow_get_query_arrow_syntax_error = function(con) { #' if the syntax of the query is invalid, expect_error(dbGetQueryArrow(con, "SELLECT")) }, arrow_get_query_arrow_non_string = function(con) { #' or if the query is not a non-`NA` string. expect_error(dbGetQueryArrow(con, character())) expect_error(dbGetQueryArrow(con, letters)) expect_error(dbGetQueryArrow(con, NA_character_)) }, arrow_get_query_arrow_record_batch_reader = function(ctx, con) { #' The object returned by `dbGetQueryArrow()` can also be passed to #' [nanoarrow::as_nanoarrow_array_stream()] to create a nanoarrow #' array stream object that can be used to read the result set #' in batches. query <- trivial_query(25, .ctx = ctx, .order_by = "a") result <- trivial_df(25) stream <- dbGetQueryArrow(con, query) rbr <- nanoarrow::as_nanoarrow_array_stream(stream) #' The chunk size is implementation-specific. out <- as.data.frame(rbr$get_next()) expect_equal(out, head(result, nrow(out))) }, #' @section Additional arguments: #' The following arguments are not part of the `dbGetQueryArrow()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
#' arrow_get_query_arrow_params = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.1") #' The `param` argument allows passing query parameters, see [dbBind()] for details. placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { placeholder <- placeholder_fun(1) query <- paste0("SELECT ", placeholder, " + 1.0 AS a") values <- trivial_values(3) - 1 params <- stats::setNames(list(values), names(placeholder)) ret <- dbGetQueryArrow(con, query, params = params) expect_equal(as.data.frame(ret), trivial_df(3), info = placeholder) } }, # arrow_get_query_arrow_immediate = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.2") #' @section Specification for the `immediate` argument: #' #' The `immediate` argument supports distinguishing between "direct" #' and "prepared" APIs offered by many database drivers. #' Passing `immediate = TRUE` leads to immediate execution of the #' query or statement, via the "direct" API (if supported by the driver). #' The default `NULL` means that the backend should choose whatever API #' makes the most sense for the database, and (if relevant) tries the #' other API if the first attempt fails. A successful second attempt #' should result in a message that suggests passing the correct #' `immediate` argument. #' Examples for possible behaviors: #' 1. DBI backend defaults to `immediate = TRUE` internally #' 1. A query without parameters is passed: query is executed #' 1. A query with parameters is passed: #' 1. `params` not given: rejected immediately by the database #' because of a syntax error in the query, the backend tries #' `immediate = FALSE` (and gives a message) #' 1. `params` given: query is executed using `immediate = FALSE` #' 1. DBI backend defaults to `immediate = FALSE` internally #' 1. A query without parameters is passed: #' 1. simple query: query is executed #' 1. 
"special" query (such as setting a config options): fails, #' the backend tries `immediate = TRUE` (and gives a message) #' 1. A query with parameters is passed: #' 1. `params` not given: waiting for parameters via [dbBind()] #' 1. `params` given: query is executed res <- expect_visible(dbGetQueryArrow(con, trivial_query(), immediate = TRUE)) check_arrow(res) }, # NULL ) DBItest/R/expectations.R0000644000176200001440000000510214602061640014577 0ustar liggesusersexpect_arglist_is_empty <- function(object) { act <- quasi_label(enquo(object), arg = "object") act$formals <- formals(act$val) expect( is.null(act$formals), sprintf("%s has an empty argument list.", act$lab) ) invisible(act$val) } expect_all_args_have_default_values <- function(object) { act <- quasi_label(enquo(object), arg = "object") act$args <- formals(act$val) act$args <- act$args[names(act$args) != "..."] act$char_args <- map_chr(act$args, as.character) expect( all(nzchar(act$char_args, keepNA = FALSE)), sprintf("%s has arguments without default values", act$lab) ) invisible(act$val) } has_method <- function(method_name) { function(x) { my_class <- class(x) expect_true( length(findMethod(method_name, my_class)) > 0L, paste("object of class", my_class, "has no", method_name, "method") ) } } expect_visible <- function(code) { ret <- withVisible(code) expect_true(ret$visible) ret$value } expect_invisible_true <- function(code) { ret <- withVisible(code) expect_true(ret$value) expect_false(ret$visible) invisible(ret$value) } expect_equal_df <- function(actual, expected) { factor_cols <- map_lgl(expected, is.factor) expected[factor_cols] <- map(expected[factor_cols], as.character) asis_cols <- map_lgl(expected, inherits, "AsIs") expected[asis_cols] <- map(expected[asis_cols], unclass) list_cols <- map_lgl(expected, is.list) if (!any(list_cols)) { order_actual <- do.call(order, actual) order_expected <- do.call(order, expected) } else { expect_false(all(list_cols)) 
expect_equal(anyDuplicated(actual[!list_cols]), 0) expect_equal(anyDuplicated(expected[!list_cols]), 0) order_actual <- do.call(order, actual[!list_cols]) order_expected <- do.call(order, expected[!list_cols]) } has_rownames_actual <- is.character(attr(actual, "row.names")) has_rownames_expected <- is.character(attr(expected, "row.names")) expect_equal(has_rownames_actual, has_rownames_expected) if (has_rownames_actual) { expect_equal(sort(row.names(actual)), sort(row.names(expected))) } actual <- unrowname(actual[order_actual, ]) expected <- unrowname(expected[order_expected, ]) expect_identical(actual, expected) } expect_equal_arrow <- function(actual, expected) { expect_equal_df(as.data.frame(actual), as.data.frame(expected)) } skip_if_not_dbitest <- function(ctx, version) { if (as.package_version(ctx$tweaks$dbitest_version) < version) { skip(paste0("tweak: dbitest_version: required: ", version, ", available: ", ctx$tweaks$dbitest_version)) } } DBItest/R/import-testthat.R0000644000176200001440000000037014602020561015240 0ustar liggesusers#' @rawNamespace import(testthat, except = c(is_null, is_false, is_true)) #' @import rlang NULL #' @importFrom methods findMethod getClasses getClass extends #' @importFrom stats setNames #' @importFrom utils head #' @importFrom magrittr %>% NULL DBItest/R/spec-meta-bind-arrow.R0000644000176200001440000011565414725054146016040 0ustar liggesusers# Generated by helper-dev.R, do not edit by hand # Sources: R/spec-meta-bind-.R, R/spec-meta-bind-expr.R, R/spec-meta-bind-runner.R # This file is generated during load_all() if it's older than the sources spec_meta_arrow_bind <- list( arrow_bind_return_value = function(ctx, con) { # # @return # `dbBind()` returns the result set, # invisibly, # for queries issued by [dbSendQuery()] or [dbSendQueryArrow()] and placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- 
placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbFetch(res)) expect_equal(dbGetRowCount(res), 0) expect_true(dbIsValid(res)) expect_false(dbHasCompleted(res)) bind_res <- withVisible(dbBind(res, bind_values)) expect_identical(res, bind_res$value) expect_false(bind_res$visible) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_too_many = function(ctx, con) { # # @section Failure modes: # Binding too many placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- if (is.null(names(bind_values))) { c(bind_values, bind_values[[1L]]) } else { c(bind_values, bogus = bind_values[[1L]]) } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_not_enough = function(ctx, con) { # # or not enough values, placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1:2 placeholder 
<- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[-1L] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_wrong_name = function(ctx, con) { # # or parameters with wrong names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, paste0("bogus", names(bind_values))) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_multi_row_unequal_length = function(ctx, con) { # # or unequal length, # also raises an error. 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- list(1:3, 2:4) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { bind_values[[2]] <- bind_values[[2]][-1] bind_values } data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]], " AND ") sql <- paste0(sql, "b = ", placeholder[[2L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_named_param_unnamed_placeholders = function(ctx, con) { # # If the placeholders in the query are named, # all parameter values must have names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, NULL) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_named_param_empty_placeholders = function(ctx, con) { # # (which must not be empty placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) 
is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(1L, 2L) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { names(bind_values)[[1]] <- "" bind_values } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_unnamed_param_named_placeholders = function(ctx, con) { # # and vice versa, # otherwise an error is raised. placeholder_funs <- get_placeholder_funs(ctx, requires_names = FALSE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, letters[seq_along(bind_values)]) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_premature_clear = function(ctx, con) { # # Calling `dbBind()` on a result set already cleared by [dbClearResult()] # also raises an error. 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) dbClearResult(res) expect_error(dbBind(res, bind_values), ".*") } }, arrow_bind_multi_row = function(ctx, con) { # # @section Specification: # The elements of the `params` argument do not need to be scalars, # vectors of arbitrary length placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(1:3) placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 3L) result <- data.frame(a = c(1.5, 2.5, 2.5)) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_multi_row_zero_length = function(ctx, con) { # # (including length 0) # are supported. # For queries, calling `dbFetch()` binding such parameters returns # concatenated results, equivalent to binding and fetching for each set # of values and connecting via [rbind()]. 
skip_if_not_dbitest(ctx, "1.7.99.12") placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(integer(0), integer(0)) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 0L) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_repeated = function(ctx, con) { # # `dbBind()` also accepts repeated calls on the same result set # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_repeated_untouched = function(ctx, con) { # # even if no 
results are fetched between calls to `dbBind()`, # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_named_param_shuffle = function(ctx, con) { # # If the placeholders in the query are named, # their order in the `params` argument is not important. 
placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(1.5, 2.5, 3.5, NA) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[c(3, 1, 2, 4)] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values_patched) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_integer = function(ctx, con) { # # At least the following data types are accepted on input (including [NA]): # - [integer] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(1L, 2L, 3L, NA) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN 
", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_numeric = function(ctx, con) { # # - [numeric] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(1.5, 2.5, 3.5, NA) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_logical = function(ctx, con) { # # - [logical] for Boolean values placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(TRUE, FALSE, NA) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) 
result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(placeholder[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_character = function(ctx, con) { # # - [character] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}", "M\U{FC}ller", `Encoding<-`("M\xfcller", "latin1"), "\U{6211}\U{662F}\U{8C01}", "ASCII", NA) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) 
expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_character_escape = function(ctx, con) { # # (also with special characters such as spaces, newlines, quotes, and backslashes) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(" ", "\n", "\r", "\b", "'", '"', "[", "]", "\\", NA) placeholder <- placeholder_fun(10L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[10L] <- paste0("(", is_null_check(placeholder[10L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f, ") sql <- paste0(sql, "CASE WHEN ", result_check[[7L]], " THEN 1.5 ELSE 2.5 END AS g, ") sql <- paste0(sql, "CASE WHEN ", result_check[[8L]], " THEN 1.5 ELSE 2.5 END AS h, ") sql <- paste0(sql, "CASE WHEN ", result_check[[9L]], " THEN 1.5 ELSE 2.5 END AS i, ") sql <- paste0(sql, "CASE WHEN ", result_check[[10L]], " THEN 1.5 ELSE 2.5 END AS j") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- 
data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5, g = 1.5, h = 1.5, i = 1.5, j = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_factor = function(ctx, con) { # # - [factor] (bound as character, # with warning) skip_if_not_dbitest(ctx, "1.7.99.13") placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(factor("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}"), factor("M\U{FC}ller"), factor(`Encoding<-`("M\xfcller", "latin1")), factor("\U{6211}\U{662F}\U{8C01}"), factor("ASCII"), factor(NA_character_)) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) suppressWarnings(expect_warning(dbBind(res, bind_values))) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_date = function(ctx, con) { # # - [Date] skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- 
get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.Date(c("2023-12-17", "2023-12-18", "2023-12-19", NA)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_date_integer = function(ctx, con) { # # (also when stored internally as integer) skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(c(18618L, 18619L, 18620L, NA), class = "Date") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") 
sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_timestamp = function(ctx, con) { # # - [POSIXct] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.POSIXct(c("2023-12-17 02:40:22", "2023-12-17 02:40:23", "2023-12-17 02:40:24", NA)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_timestamp_lt = function(ctx, con) { # # - [POSIXlt] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) 
is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(as.POSIXlt("2023-12-17 02:40:49"), as.POSIXlt("2023-12-17 02:40:50"), as.POSIXlt("2023-12-17 02:40:51"), as.POSIXlt(NA_character_)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_time_seconds = function(ctx, con) { # # - [difftime] values skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.difftime(c(1, 2, 3, NA), units = "secs") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 
ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_time_hours = function(ctx, con) { # # (also with units other than seconds skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.difftime(c(1, 2, 3, NA), units = "hours") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_time_minutes_integer = function(ctx, con) { # # and with the value stored as integer) skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) 
is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.difftime(c(1, 2, 3, NA), units = "mins") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_raw = function(ctx, con) { # # - lists of [raw] for blobs (with `NULL` entries for SQL NULL values) skip_if_not_dbitest(ctx, "1.7.99.14") skip_if(isTRUE(ctx$tweaks$omit_blob_tests)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check cast_fun <- ctx$tweaks$blob_cast for (placeholder_fun in placeholder_funs) { bind_values <- list(list(as.raw(c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))), list(raw(3)), list(NULL)) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(cast_fun(placeholder)[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 
END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_bind_blob = function(ctx, con) { # # - objects of type [blob::blob] skip_if(isTRUE(ctx$tweaks$omit_blob_tests)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check cast_fun <- ctx$tweaks$blob_cast for (placeholder_fun in placeholder_funs) { bind_values <- list( blob::blob(as.raw(c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))), blob::blob(raw(3)), structure(vctrs::list_of(NULL, .ptype = raw(0)), class = c("blob", "vctrs_list_of", "vctrs_vctr", "list")) ) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(cast_fun(placeholder)[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, NULL ) DBItest/R/test_backend.R0000644000176200001440000000162514602017371014527 0ustar 
liggesusers
# NOTE(review): the bare token above and the `DBItest/R/...0ustar` line further
# down are tar-archive header residue from the packaged source dump, not R code.

# Run the DBItest test suite against a single backend.
#
# `target` names the backend, optionally prefixed with "test-" (the prefix is
# stripped).  A target of the form "<pkg>-<driver>" is treated as an ODBC
# backend: the driver part selects the testthat filter ("driver-<driver>") and
# the DSN, and a connection string is exported via an `ODBC_CS_<DSN>`
# environment variable.  Any other target is taken as a package name and run
# with the default "DBItest" filter.
#
# @param target   Backend identifier, e.g. "RSQLite" or "odbc-sql-server".
# @param reporter Optional testthat reporter, forwarded to `test_local()`.
# @return The value of `testthat::test_local()`; the call errors on any test
#   failure because `stop_on_failure = TRUE`.
test_backend <- function(target, reporter = NULL) {
  # Accept both "test-RSQLite" and "RSQLite".
  target <- sub("^test-", "", target)
  message("Target: ", target)
  # "<pkg>-<driver>" pattern: group 1 = package, group 2 = driver name.
  rx <- "^([^-]+)-(.*)$"
  # odbc
  if (grepl(rx, target)) {
    message("ODBC detected")
    pkg <- sub(rx, "\\1", target)
    message("pkg: ", pkg)
    driver <- sub(rx, "\\2", target)
    message("driver: ", driver)
    filter <- paste0("driver-", driver)
    message("filter: ", filter)
    # DSN name: driver name upper-cased with dashes removed,
    # e.g. "sql-server" -> "SQLSERVER".
    dsn <- toupper(gsub("-", "", driver))
    message("dsn: ", dsn)
    cs <- paste0("dsn=", dsn)
    if (filter == "driver-sql-server") {
      # Hard-coded test credentials for the SQL Server DSN.
      cs <- paste0(cs, ";UID=SA;PWD=Password12")
    }
    # Export the connection string as ODBC_CS_<DSN> for the driver tests.
    names(cs) <- paste0("ODBC_CS_", dsn)
    do.call(Sys.setenv, as.list(cs))
  } else {
    pkg <- target
    filter <- "DBItest"
  }
  # Force crayon-colored output for the duration of this call.
  # NOTE(review): `local_options()` is not namespaced here -- presumably
  # withr's, attached by the calling environment; confirm.
  local_options(crayon.enabled = TRUE)
  # Load the development version of the package from the parent directory.
  pkgload::load_all("..")
  # Run only the test file matching `filter` exactly; error on any failure.
  testthat::test_local(pkg, filter = paste0("^", filter, "$"), stop_on_failure = TRUE, reporter = reporter)
}
DBItest/R/spec-meta-bind-stream.R0000644000176200001440000013447714725054147016176 0ustar liggesusers# Generated by helper-dev.R, do not edit by hand
# Sources: R/spec-meta-bind-.R, R/spec-meta-bind-expr.R, R/spec-meta-bind-runner.R
# This file is generated during load_all() if it's older than the sources

spec_meta_stream_bind <- list(
  stream_bind_return_value = function(ctx, con) {
    #
    # @return
    # `dbBind()` returns the result set,
    # invisibly,
    # for queries issued by [dbSendQuery()] or [dbSendQueryArrow()] and
    placeholder_funs <- get_placeholder_funs(ctx)
    is_null_check <- ctx$tweaks$is_null_check
    for (placeholder_fun in placeholder_funs) {
      bind_values <- structure(data.frame(1L, check.names = FALSE), names = "")
      placeholder <- placeholder_fun(1L)
      names(bind_values) <- names(placeholder)
      placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1]))
      result_check <- paste0("(", placeholder, " = ", placeholder_values, ")")
      sql <- "SELECT "
      sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a")
      res <- dbSendQuery(con, sql)
      on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA))
expect_error(dbFetch(res)) expect_equal(dbGetRowCount(res), 0) expect_true(dbIsValid(res)) expect_false(dbHasCompleted(res)) bind_res <- withVisible(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values))) expect_identical(res, bind_res$value) expect_false(bind_res$visible) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_return_value_statement = function(ctx, con) { # # also for data manipulation statements issued by # [dbSendStatement()]. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_identical(dbGetRowsAffected(res), NA_integer_) expect_true(dbIsValid(res)) expect_false(dbHasCompleted(res)) bind_res <- withVisible(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values))) expect_identical(res, bind_res$value) expect_false(bind_res$visible) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_too_many = function(ctx, con) { # # @section Failure modes: # Binding too many placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { 
bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- if (is.null(names(bind_values))) { c(bind_values, bind_values[[1L]]) } else { c(bind_values, bogus = bind_values[[1L]]) } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_not_enough = function(ctx, con) { # # or not enough values, placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[-1L] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_wrong_name = function(ctx, con) { # # or parameters with wrong names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for 
(placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, paste0("bogus", names(bind_values))) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_named_param_unnamed_placeholders = function(ctx, con) { # # If the placeholders in the query are named, # all parameter values must have names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, NULL) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_named_param_empty_placeholders = function(ctx, con) { # # (which must not be empty placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- 
ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { names(bind_values)[[1]] <- "" bind_values } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_named_param_na_placeholders = function(ctx, con) { # # or `NA`), placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { names(bind_values)[[1]] <- NA bind_values } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_unnamed_param_named_placeholders = 
function(ctx, con) { # # and vice versa, # otherwise an error is raised. placeholder_funs <- get_placeholder_funs(ctx, requires_names = FALSE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, letters[seq_along(bind_values)]) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_premature_clear = function(ctx, con) { # # Calling `dbBind()` on a result set already cleared by [dbClearResult()] # also raises an error. 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) dbClearResult(res) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)), ".*") } }, stream_bind_multi_row = function(ctx, con) { # # @section Specification: # The elements of the `params` argument do not need to be scalars, # vectors of arbitrary length placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1:3, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 3L) result <- data.frame(a = c(1.5, 2.5, 2.5)) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_multi_row_zero_length = function(ctx, con) { # # (including length 0) # are supported. 
# For queries, calling `dbFetch()` binding such parameters returns # concatenated results, equivalent to binding and fetching for each set # of values and connecting via [rbind()]. skip_if_not_dbitest(ctx, "1.7.99.12") placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(integer(0), integer(0), check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 0L) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_multi_row_statement = function(ctx, con) { # # For data manipulation statements, `dbGetRowsAffected()` returns the # total number of rows affected if binding non-scalar parameters. 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1:3, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 6L) } expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_repeated = function(ctx, con) { # # `dbBind()` also accepts repeated calls on the same result set # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- 
check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_repeated_statement = function(ctx, con) { # # and data manipulation statements, placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_repeated_untouched = function(ctx, con) { # # even if no results are fetched between calls to `dbBind()`, # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", 
placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_repeated_untouched_statement = function(ctx, con) { # # and data manipulation statements. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_named_param_shuffle = function(ctx, con) { # # If the placeholders in the query are named, # their order in the `params` argument is not important. 
placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1.5, 2.5, 3.5, NA_real_, check.names = FALSE), names = character(4)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[c(3, 1, 2, 4)] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_integer = function(ctx, con) { # # At least the following data types are accepted on input (including [NA]): # - [integer] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, 3L, NA_integer_, check.names = FALSE), names = character(4)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, 
"CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_numeric = function(ctx, con) { # # - [numeric] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1.5, 2.5, 3.5, NA_real_, check.names = FALSE), names = character(4)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_logical = function(ctx, con) { # # - [logical] for Boolean values 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(TRUE, FALSE, NA, check.names = FALSE), names = character(3)) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(placeholder[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_character = function(ctx, con) { # # - [character] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}", "M\U{FC}ller", `Encoding<-`("M\xfcller", "latin1"), "\U{6211}\U{662F}\U{8C01}", "ASCII", NA_character_, check.names = FALSE), names = character(6) ) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- 
paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_character_escape = function(ctx, con) { # # (also with special characters such as spaces, newlines, quotes, and backslashes) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(" ", "\n", "\r", "\b", "'", '"', "[", "]", "\\", NA_character_, check.names = FALSE), names = character(10) ) placeholder <- placeholder_fun(10L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[10L] <- paste0("(", is_null_check(placeholder[10L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN 
", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f, ") sql <- paste0(sql, "CASE WHEN ", result_check[[7L]], " THEN 1.5 ELSE 2.5 END AS g, ") sql <- paste0(sql, "CASE WHEN ", result_check[[8L]], " THEN 1.5 ELSE 2.5 END AS h, ") sql <- paste0(sql, "CASE WHEN ", result_check[[9L]], " THEN 1.5 ELSE 2.5 END AS i, ") sql <- paste0(sql, "CASE WHEN ", result_check[[10L]], " THEN 1.5 ELSE 2.5 END AS j") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5, g = 1.5, h = 1.5, i = 1.5, j = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_factor = function(ctx, con) { # # - [factor] (bound as character, # with warning) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(factor("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}"), factor("M\U{FC}ller"), factor(`Encoding<-`("M\xfcller", "latin1")), factor("\U{6211}\U{662F}\U{8C01}"), factor("ASCII"), factor(NA_character_), check.names = FALSE), names = character(6) ) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- 
paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_date = function(ctx, con) { # # - [Date] skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.Date("2023-12-17"), as.Date("2023-12-18"), as.Date("2023-12-19"), as.Date(NA), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_date_integer = function(ctx, con) 
{ # # (also when stored internally as integer) skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(structure(18618L, class = "Date"), structure(18619L, class = "Date"), structure(18620L, class = "Date"), structure(NA_integer_, class = "Date"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_timestamp = function(ctx, con) { # # - [POSIXct] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.POSIXct("2023-12-17 02:40:22"), as.POSIXct("2023-12-17 02:40:23"), as.POSIXct("2023-12-17 02:40:24"), as.POSIXct(NA_character_), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- 
names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_timestamp_lt = function(ctx, con) { # # - [POSIXlt] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.POSIXct("2023-12-17 02:40:49"), as.POSIXct("2023-12-17 02:40:50"), as.POSIXct("2023-12-17 02:40:51"), as.POSIXct(NA_character_), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 
END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_time_seconds = function(ctx, con) { # # - [difftime] values skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.difftime(1, units = "secs"), as.difftime(2, units = "secs"), as.difftime(3, units = "secs"), as.difftime(NA_real_, units = "secs"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_time_hours = function(ctx, con) { # # (also 
with units other than seconds skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.difftime(1, units = "hours"), as.difftime(2, units = "hours"), as.difftime(3, units = "hours"), as.difftime(NA_real_, units = "hours"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_time_minutes_integer = function(ctx, con) { # # and with the value stored as integer) skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.difftime(1, units = "mins"), as.difftime(2, units = "mins"), as.difftime(3, units = "mins"), as.difftime(NA_real_, units = "mins"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- 
names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, stream_bind_blob = function(ctx, con) { # # - objects of type [blob::blob] skip_if(isTRUE(ctx$tweaks$omit_blob_tests)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check cast_fun <- ctx$tweaks$blob_cast for (placeholder_fun in placeholder_funs) { bind_values <- structure( list( blob::blob(as.raw(c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))), blob::blob(raw(3)), structure(vctrs::list_of(NULL, .ptype = raw(0)), class = c("blob", "vctrs_list_of", "vctrs_vctr", "list")) ), names = character(3), class = "data.frame", row.names = c(NA, -1L) ) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(cast_fun(placeholder)[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", 
result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, NULL ) DBItest/R/spec-compliance-methods.R0000644000176200001440000000745314602017371016611 0ustar liggesusers#' spec_compliance_methods #' @family compliance specifications #' @usage NULL #' @format NULL #' @importFrom callr r #' @keywords NULL #' @section DBI classes and methods: spec_compliance_methods <- list( compliance = function(ctx) { #' A backend defines three classes, key_methods <- get_key_methods() #' which are subclasses of expect_identical( names(key_methods), c( #' [DBIDriver-class], "Driver", #' [DBIConnection-class], "Connection", #' and [DBIResult-class]. "Result" ) ) pkg <- package_name(ctx) where <- asNamespace(pkg) sapply(names(key_methods), function(name) { dbi_class <- paste0("DBI", name) classes <- Filter(function(class) { extends(class, dbi_class) && getClass(class)@virtual == FALSE }, getClasses(where)) expect_gte(length(classes), 1) class <- classes[[1]] #' The backend provides implementation for all methods #' of these base classes #' that are defined but not implemented by DBI. mapply(function(method, args) { expect_has_class_method(method, class, args, where) }, names(key_methods[[name]]), key_methods[[name]]) }) # }, reexport = function(ctx) { #' All methods defined in \pkg{DBI} are reexported (so that the package can #' be used without having to attach \pkg{DBI}), pkg <- package_name(ctx) where <- asNamespace(pkg) dbi_names <- dbi_generics(ctx$tweaks$dbitest_version) # Suppressing warning "... 
may not be available when loading" exported_names <- suppressWarnings(callr::r( function(pkg) { tryCatch( getNamespaceExports(getNamespace(pkg)), error = function(e) character() ) }, args = list(pkg = pkg) )) # Guard against scenarios where package is not installed if (length(exported_names) == 0) { skip("reexport: package must be installed for this test") } missing <- setdiff(dbi_names, exported_names) expect_equal(paste(missing, collapse = ", "), "") }, ellipsis = function(ctx) { #' and have an ellipsis `...` in their formals for extensibility. pkg <- package_name(ctx) where <- asNamespace(pkg) methods <- s4_methods(where, function(x) x == "DBI") methods <- methods[grep("^db", names(methods))] Map(expect_ellipsis_in_formals, methods, names(methods)) }, # NULL ) # Helpers ----------------------------------------------------------------- #' @importFrom methods hasMethod expect_has_class_method <- function(name, class, args, driver_package) { full_args <- c(class, args) eval(bquote( expect_true(hasMethod(.(name), .(full_args), driver_package)) )) } expect_ellipsis_in_formals <- function(method, name) { sym <- as.name(name) eval(bquote({ .(sym) <- method expect_true("..." 
%in% s4_real_argument_names(.(sym))) })) } get_key_methods <- function() { list( Driver = list( "dbConnect" = NULL, "dbDataType" = NULL ), Connection = list( "dbDisconnect" = NULL, "dbGetInfo" = NULL, "dbSendQuery" = "character", "dbListFields" = "character", "dbListTables" = NULL, "dbReadTable" = "character", "dbWriteTable" = c("character", "data.frame"), "dbExistsTable" = "character", "dbRemoveTable" = "character", "dbBegin" = NULL, "dbCommit" = NULL, "dbRollback" = NULL, "dbIsValid" = NULL, "dbQuoteString" = "character", "dbQuoteIdentifier" = "character" ), Result = list( "dbIsValid" = NULL, "dbFetch" = NULL, "dbClearResult" = NULL, "dbColumnInfo" = NULL, "dbGetRowsAffected" = NULL, "dbGetRowCount" = NULL, "dbHasCompleted" = NULL, "dbGetStatement" = NULL, "dbBind" = NULL ) ) } DBItest/R/test-stress.R0000644000176200001440000000061314602017371014375 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_stress()]: #' Stress tests (not tested with `test_all`) NULL #' Stress tests #' #' @inheritParams test_all #' @include test-compliance.R #' @family tests #' @keywords internal #' @export test_stress <- function(skip = NULL, ctx = get_default_context()) { test_suite <- "Stress" run_tests(ctx, spec_stress, skip, test_suite) } DBItest/R/utils.R0000644000176200001440000000510214602020561013226 0ustar liggesusersget_pkg_path <- function(ctx) { pkg_name <- package_name(ctx) expect_type(pkg_name, "character") pkg_path <- find.package(pkg_name) pkg_path } utils::globalVariables("con") utils::globalVariables("con2") local_connection <- function(ctx, ..., .local_envir = parent.frame()) { con <- connect(ctx, ...) withr::local_db_connection(con, .local_envir = .local_envir) } local_closed_connection <- function(ctx, ...) { con <- connect(ctx, ...) dbDisconnect(con) con } local_invalid_connection <- function(ctx, ...) { con <- connect(ctx, ...) 
dbDisconnect(con) unserialize(serialize(con, NULL)) } # Calls `dbClearResult()` on `query` after exiting `frame`. local_result <- function(query, frame = caller_env()) { res <- query withr::defer( { dbClearResult(res) }, envir = frame ) res } # Calls `try_silent(dbRemoveTable())` after exiting `frame`. local_remove_test_table <- function(con, name, frame = caller_env()) { table_name <- dbQuoteIdentifier(con, name) withr::defer( try_silent( dbRemoveTable(con, table_name) ), envir = frame ) } get_penguins <- function(ctx) { datasets_penguins <- unrowname(palmerpenguins::penguins[c(1, 153, 277), ]) # FIXME: better handling of DBI backends that do support factors datasets_penguins$species <- as.character(datasets_penguins$species) datasets_penguins$island <- as.character(datasets_penguins$island) datasets_penguins$sex <- as.character(datasets_penguins$sex) as.data.frame(datasets_penguins) } unrowname <- function(x) { rownames(x) <- NULL x } random_table_name <- function(n = 10) { # FIXME: Use parallel-safe sequence of numbers paste0("dbit", paste(sample(letters, n, replace = TRUE), collapse = "")) } try_silent <- function(code) { tryCatch( code, error = function(e) NULL ) } check_df <- function(df) { expect_s3_class(df, "data.frame") if (length(df) >= 1L) { lengths <- unname(lengths(df)) expect_equal(diff(lengths), rep(0L, length(lengths) - 1L)) expect_equal(nrow(df), lengths[[1]]) } df_names <- names(df) expect_true(all(df_names != "")) expect_false(anyNA(df_names)) df } check_arrow <- function(stream, transform = identity) { to <- function(schema, ptype) transform(ptype) if (inherits(stream, "nanoarrow_array_stream")) { on.exit(stream$release()) df <- nanoarrow::convert_array_stream(stream, to) } else if (inherits(stream, "nanoarrow_array")) { df <- nanoarrow::convert_array(stream, to) } else { stop("Unexpected conversion of type ", class(stream), ".", call. 
= FALSE) } check_df(df) } DBItest/R/spec-connection.R0000644000176200001440000000021214602017371015157 0ustar liggesusers#' @format NULL spec_connection <- c( spec_connection_disconnect, spec_connection_data_type, spec_connection_get_info, # NULL ) DBItest/R/spec-meta-bind.R0000644000176200001440000013137214725054222014676 0ustar liggesusers# Generated by helper-dev.R, do not edit by hand # Sources: R/spec-meta-bind-.R, R/spec-meta-bind-expr.R, R/spec-meta-bind-runner.R # This file is generated during load_all() if it's older than the sources # Last generation with dev constructive including https://github.com/cynkra/constructive/pull/504 # and https://github.com/cynkra/constructive/pull/505 . # This comment was added manually. spec_meta_bind <- list( bind_return_value = function(ctx, con) { # # @return # `dbBind()` returns the result set, # invisibly, # for queries issued by [dbSendQuery()] or [dbSendQueryArrow()] and placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbFetch(res)) expect_equal(dbGetRowCount(res), 0) expect_true(dbIsValid(res)) expect_false(dbHasCompleted(res)) bind_res <- withVisible(dbBind(res, bind_values)) expect_identical(res, bind_res$value) expect_false(bind_res$visible) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_return_value_statement = function(ctx, con) { # # also for data 
manipulation statements issued by # [dbSendStatement()]. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_identical(dbGetRowsAffected(res), NA_integer_) expect_true(dbIsValid(res)) expect_false(dbHasCompleted(res)) bind_res <- withVisible(dbBind(res, bind_values)) expect_identical(res, bind_res$value) expect_false(bind_res$visible) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } expect_error(dbClearResult(res), NA) res <- NULL } }, bind_too_many = function(ctx, con) { # # @section Failure modes: # Binding too many placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- if (is.null(names(bind_values))) { c(bind_values, bind_values[[1L]]) } else { c(bind_values, bogus = bind_values[[1L]]) } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") 
expect_error(dbClearResult(res), NA) res <- NULL } }, bind_not_enough = function(ctx, con) { # # or not enough values, placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1:2 placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[-1L] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_wrong_name = function(ctx, con) { # # or parameters with wrong names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, paste0("bogus", names(bind_values))) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_multi_row_unequal_length = function(ctx, con) { # # or unequal length, # also raises an error. 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- list(1:3, 2:4) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { bind_values[[2]] <- bind_values[[2]][-1] bind_values } data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]], " AND ") sql <- paste0(sql, "b = ", placeholder[[2L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_named_param_unnamed_placeholders = function(ctx, con) { # # If the placeholders in the query are named, # all parameter values must have names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, NULL) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_named_param_empty_placeholders = function(ctx, con) { # # (which must not be empty placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- 
ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(1L, 2L) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { names(bind_values)[[1]] <- "" bind_values } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_named_param_na_placeholders = function(ctx, con) { # # or `NA`), placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(1L, 2L) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { names(bind_values)[[1]] <- NA bind_values } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_unnamed_param_named_placeholders = function(ctx, con) { # # and vice versa, # otherwise an error is raised. 
placeholder_funs <- get_placeholder_funs(ctx, requires_names = FALSE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, letters[seq_along(bind_values)]) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBind(res, bind_values_patched), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, bind_premature_clear = function(ctx, con) { # # Calling `dbBind()` on a result set already cleared by [dbClearResult()] # also raises an error. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) dbClearResult(res) expect_error(dbBind(res, bind_values), ".*") } }, bind_multi_row = function(ctx, con) { # # @section Specification: # The elements of the `params` argument do not need to be scalars, # vectors of arbitrary length placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(1:3) placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) 
DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 3L) result <- data.frame(a = c(1.5, 2.5, 2.5)) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_multi_row_zero_length = function(ctx, con) { # # (including length 0) # are supported. # For queries, calling `dbFetch()` binding such parameters returns # concatenated results, equivalent to binding and fetching for each set # of values and connecting via [rbind()]. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(integer(0), integer(0)) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 0L) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_multi_row_statement = function(ctx, con) { # # For data manipulation statements, `dbGetRowsAffected()` returns the # total number of rows affected if binding non-scalar parameters. 
placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- list(1:3) placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 6L) } expect_error(dbClearResult(res), NA) res <- NULL } }, bind_repeated = function(ctx, con) { # # `dbBind()` also accepts repeated calls on the same result set # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_repeated_statement = function(ctx, con) { # # and data manipulation 
statements, placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } dbBind(res, bind_values) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } expect_error(dbClearResult(res), NA) res <- NULL } }, bind_repeated_untouched = function(ctx, con) { # # even if no results are fetched between calls to `dbBind()`, # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, 
bind_repeated_untouched_statement = function(ctx, con) { # # and data manipulation statements. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check allow_na_rows_affected <- ctx$tweaks$allow_na_rows_affected for (placeholder_fun in placeholder_funs) { bind_values <- 1L placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) data <- data.frame(a = rep(1:5, 1:5), b = 1:15) table_name <- random_table_name() dbWriteTable(con, table_name, data, temporary = TRUE) sql <- paste0("UPDATE ", dbQuoteIdentifier(con, table_name), " SET b = b + 1 WHERE ") sql <- paste0(sql, "a = ", placeholder[[1L]]) res <- dbSendStatement(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) dbBind(res, bind_values) rows_affected <- dbGetRowsAffected(res) if (!isTRUE(allow_na_rows_affected) || !is.na(rows_affected)) { expect_equal(rows_affected, 1L) } expect_error(dbClearResult(res), NA) res <- NULL } }, bind_named_param_shuffle = function(ctx, con) { # # If the placeholders in the query are named, # their order in the `params` argument is not important. 
placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(1.5, 2.5, 3.5, NA) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[c(3, 1, 2, 4)] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values_patched) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_integer = function(ctx, con) { # # At least the following data types are accepted on input (including [NA]): # - [integer] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(1L, 2L, 3L, NA) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", 
result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_numeric = function(ctx, con) { # # - [numeric] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(1.5, 2.5, 3.5, NA) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_logical = function(ctx, con) { # # - [logical] for Boolean values placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(TRUE, FALSE, NA) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", 
placeholder, " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(placeholder[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_character = function(ctx, con) { # # - [character] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}", "M\U{FC}ller", `Encoding<-`("M\xfcller", "latin1"), "\U{6211}\U{662F}\U{8C01}", "ASCII", NA) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows 
<- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_character_escape = function(ctx, con) { # # (also with special characters such as spaces, newlines, quotes, and backslashes) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- c(" ", "\n", "\r", "\b", "'", '"', "[", "]", "\\", NA) placeholder <- placeholder_fun(10L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[10L] <- paste0("(", is_null_check(placeholder[10L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f, ") sql <- paste0(sql, "CASE WHEN ", result_check[[7L]], " THEN 1.5 ELSE 2.5 END AS g, ") sql <- paste0(sql, "CASE WHEN ", result_check[[8L]], " THEN 1.5 ELSE 2.5 END AS h, ") sql <- paste0(sql, "CASE WHEN ", result_check[[9L]], " THEN 1.5 ELSE 2.5 END AS i, ") sql <- paste0(sql, "CASE WHEN ", result_check[[10L]], " THEN 1.5 ELSE 2.5 END AS j") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5, g = 1.5, h = 1.5, 
i = 1.5, j = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_factor = function(ctx, con) { # # - [factor] (bound as character, # with warning) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(factor("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}"), factor("M\U{FC}ller"), factor(`Encoding<-`("M\xfcller", "latin1")), factor("\U{6211}\U{662F}\U{8C01}"), factor("ASCII"), factor(NA_character_)) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) suppressWarnings(expect_warning(dbBind(res, bind_values))) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_date = function(ctx, con) { # # - [Date] skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.Date(c("2023-12-17", 
"2023-12-18", "2023-12-19", NA)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_date_integer = function(ctx, con) { # # (also when stored internally as integer) skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(c(18618L, 18619L, 18620L, NA), class = "Date") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 
END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_timestamp = function(ctx, con) { # # - [POSIXct] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.POSIXct(c("2023-12-17 02:40:22", "2023-12-17 02:40:23", "2023-12-17 02:40:24", NA)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_timestamp_lt = function(ctx, con) { # # - [POSIXlt] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- list(as.POSIXlt("2023-12-17 02:40:49"), as.POSIXlt("2023-12-17 02:40:50"), 
as.POSIXlt("2023-12-17 02:40:51"), as.POSIXlt(NA_character_)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_time_seconds = function(ctx, con) { # # - [difftime] values skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.difftime(c(1, 2, 3, NA), units = "secs") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS 
d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_time_hours = function(ctx, con) { # # (also with units other than seconds skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.difftime(c(1, 2, 3, NA), units = "hours") placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_time_minutes_integer = function(ctx, con) { # # and with the value stored as integer) skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- as.difftime(c(1, 2, 3, NA), units = "mins") placeholder <- placeholder_fun(4L) names(bind_values) <- 
names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_raw = function(ctx, con) { # # - lists of [raw] for blobs (with `NULL` entries for SQL NULL values) skip_if(isTRUE(ctx$tweaks$omit_blob_tests)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check cast_fun <- ctx$tweaks$blob_cast for (placeholder_fun in placeholder_funs) { bind_values <- list(list(as.raw(c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))), list(raw(3)), list(NULL)) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(cast_fun(placeholder)[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) 
expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, bind_blob = function(ctx, con) { # # - objects of type [blob::blob] skip_if(isTRUE(ctx$tweaks$omit_blob_tests)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check cast_fun <- ctx$tweaks$blob_cast for (placeholder_fun in placeholder_funs) { bind_values <- list( blob::blob(as.raw(c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))), blob::blob(raw(3)), structure(vctrs::list_of(NULL, .ptype = raw(0)), class = c("blob", "vctrs_list_of", "vctrs_vctr", "list")) ) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(cast_fun(placeholder)[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQuery(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBind(res, bind_values) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, NULL ) DBItest/R/spec-transaction-begin-commit-rollback.R0000644000176200001440000001274514602020561021515 0ustar liggesusers#' spec_transaction_begin_commit_rollback #' @family transaction specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_transaction_begin_commit_rollback <- list( begin_formals = function() { # 
expect_equal(names(formals(dbBegin)), c("conn", "...")) }, # commit_formals = function() { # expect_equal(names(formals(dbCommit)), c("conn", "...")) }, # rollback_formals = function() { # expect_equal(names(formals(dbRollback)), c("conn", "...")) }, begin_commit_return_value = function(con) { #' @return #' `dbBegin()`, `dbCommit()` and `dbRollback()` return `TRUE`, invisibly. expect_invisible_true(dbBegin(con)) on.exit({ dbRollback(con) }) expect_invisible_true(dbCommit(con)) on.exit(NULL) }, # begin_rollback_return_value = function(con) { expect_invisible_true(dbBegin(con)) expect_invisible_true(dbRollback(con)) }, #' begin_commit_closed = function(ctx, closed_con) { #' @section Failure modes: #' The implementations are expected to raise an error in case of failure, #' but this is not tested. #' In any way, all generics throw an error with a closed expect_error(dbBegin(closed_con)) expect_error(dbCommit(closed_con)) expect_error(dbRollback(closed_con)) }, # begin_commit_invalid = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbBegin(invalid_con)) expect_error(dbCommit(invalid_con)) expect_error(dbRollback(invalid_con)) }, # commit_without_begin = function(con) { #' In addition, a call to `dbCommit()` expect_error(dbCommit(con)) }, # rollback_without_begin = function(con) { #' or `dbRollback()` #' without a prior call to `dbBegin()` raises an error. expect_error(dbRollback(con)) }, # begin_begin = function(con) { #' Nested transactions are not supported by DBI, #' an attempt to call `dbBegin()` twice dbBegin(con) on.exit({ dbRollback(con) }) #' yields an error. expect_error(dbBegin(con)) dbCommit(con) on.exit(NULL) }, begin_commit = function(con) { #' @section Specification: #' Actual support for transactions may vary between backends. #' A transaction is initiated by a call to `dbBegin()` dbBegin(con) #' and committed by a call to `dbCommit()`. 
success <- FALSE expect_error( { dbCommit(con) success <- TRUE }, NA ) if (!success) dbRollback(con) }, begin_write_commit_1 = function(con) { #' Data written in a transaction must persist after the transaction is committed. #' For example, a record that is missing when the transaction is started # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit00" dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) dbBegin(con) on.exit({ dbRollback(con) }) #' but is created during the transaction dbExecute(con, paste0("INSERT INTO ", table_name, " (a) VALUES (1)")) #' must exist expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) #' both during dbCommit(con) on.exit(NULL) #' and after the transaction, expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) }, # second stage begin_write_commit_2 = function(con, table_name = "dbit00") { #' and also in a new connection. expect_true(dbExistsTable(con, table_name)) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) }, # #' begin_rollback = function(con) { #' A transaction dbBegin(con) #' can also be aborted with `dbRollback()`. expect_error(dbRollback(con), NA) }, begin_write_rollback = function(con, table_name) { #' All data written in such a transaction must be removed after the #' transaction is rolled back. #' For example, a record that is missing when the transaction is started dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) dbBegin(con) #' but is created during the transaction dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE) #' must not exist anymore after the rollback. 
dbRollback(con) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0L)) }, # begin_write_disconnect_1 = function(local_con) { # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit01" #' #' Disconnection from a connection with an open transaction dbWriteTable(local_con, table_name, data.frame(a = 0L), overwrite = TRUE) dbBegin(local_con) dbWriteTable(local_con, table_name, data.frame(a = 1L), append = TRUE) }, # begin_write_disconnect_2 = function(local_con, table_name = "dbit01") { #' effectively rolls back the transaction. #' All data written in such a transaction must be removed after the #' transaction is rolled back. expect_equal(check_df(dbReadTable(local_con, table_name)), data.frame(a = 0L)) }, #' #' The behavior is not specified if other arguments are passed to these #' functions. In particular, \pkg{RSQLite} issues named transactions #' with support for nesting #' if the `name` argument is set. #' #' The transaction isolation level is not specified by DBI. NULL ) DBItest/R/spec-sql-write-table.R0000644000176200001440000007077214725004207016056 0ustar liggesusers#' spec_sql_write_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @importFrom lubridate with_tz spec_sql_write_table <- list( write_table_formals = function() { # expect_equal(names(formals(dbWriteTable)), c("conn", "name", "value", "...")) }, write_table_return = function(con, table_name) { #' @return #' `dbWriteTable()` returns `TRUE`, invisibly. 
expect_invisible_true(dbWriteTable(con, table_name, data.frame(a = 1L))) }, #' write_table_overwrite = function(con, table_name) { #' @section Failure modes: #' If the table exists, and both `append` and `overwrite` arguments are unset, test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) expect_error(dbWriteTable(con, table_name, data.frame(a = 2L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, write_table_append_incompatible = function(con, table_name) { #' or `append = TRUE` and the data frame with the new data has different #' column names, #' an error is raised; the remote table remains unchanged. test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) expect_error(dbWriteTable(con, table_name, data.frame(b = 2L), append = TRUE)) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' write_table_closed_connection = function(ctx, closed_con) { #' An error is raised when calling this method for a closed expect_error(dbWriteTable(closed_con, "test", data.frame(a = 1))) }, write_table_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbWriteTable(invalid_con, "test", data.frame(a = 1))) }, write_table_error = function(ctx, con, table_name) { #' An error is also raised test_in <- data.frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbWriteTable(con, NA, test_in)) #' if this results in a non-scalar. 
expect_error(dbWriteTable(con, c(table_name, table_name), test_in)) #' Invalid values for the additional arguments `row.names`, #' `overwrite`, `append`, `field.types`, and `temporary` #' (non-scalars, expect_error(dbWriteTable(con, table_name, test_in, row.names = letters)) expect_error(dbWriteTable(con, table_name, test_in, overwrite = c(TRUE, FALSE))) expect_error(dbWriteTable(con, table_name, test_in, append = c(TRUE, FALSE))) expect_error(dbWriteTable(con, table_name, test_in, temporary = c(TRUE, FALSE))) #' unsupported data types, expect_error(dbWriteTable(con, table_name, test_in, row.names = list(1L))) expect_error(dbWriteTable(con, table_name, test_in, overwrite = 1L)) expect_error(dbWriteTable(con, table_name, test_in, append = 1L)) expect_error(dbWriteTable(con, table_name, test_in, field.types = 1L)) expect_error(dbWriteTable(con, table_name, test_in, temporary = 1L)) #' `NA`, expect_error(dbWriteTable(con, table_name, test_in, overwrite = NA)) expect_error(dbWriteTable(con, table_name, test_in, append = NA)) expect_error(dbWriteTable(con, table_name, test_in, field.types = NA)) expect_error(dbWriteTable(con, table_name, test_in, temporary = NA)) #' incompatible values, expect_error(dbWriteTable(con, table_name, test_in, field.types = letters)) expect_error(dbWriteTable(con, table_name, test_in, field.types = c(b = "INTEGER"))) expect_error(dbWriteTable(con, table_name, test_in, overwrite = TRUE, append = TRUE)) expect_error(dbWriteTable(con, table_name, test_in, append = TRUE, field.types = c(a = "INTEGER"))) #' duplicate expect_error(dbWriteTable(con, table_name, test_in, field.types = c(a = "INTEGER", a = "INTEGER"))) #' or missing names, expect_error(dbWriteTable(con, table_name, test_in, field.types = c("INTEGER"))) #' incompatible columns) dbWriteTable(con, table_name, test_in) expect_error(dbWriteTable(con, table_name, data.frame(b = 2L, c = 3L), append = TRUE)) #' also raise an error. 
}, #' @section Additional arguments: #' The following arguments are not part of the `dbWriteTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `row.names` (default: `FALSE`) #' - `overwrite` (default: `FALSE`) #' - `append` (default: `FALSE`) #' - `field.types` (default: `NULL`) #' - `temporary` (default: `FALSE`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. write_table_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- data.frame(a = 1) local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbWriteTable()` will do the quoting, dbWriteTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, write_table_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- data.frame(a = 1) local_remove_test_table(con, table_name) dbWriteTable(con, dbQuoteIdentifier(con, table_name), test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) } }, #' write_table_value_df = function(con, table_name) { #' The `value` argument must be a data frame test_in <- trivial_df() dbWriteTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) 
}, write_table_value_subset = function(ctx, con, table_name) { #' with a subset of the columns of the existing table if `append = TRUE`. test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbWriteTable(con, table_name, test_in[2], append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) test_in[c(1, 3)] <- NA_real_ expect_equal_df(test_out, test_in) }, write_table_value_shuffle = function(ctx, con, table_name) { #' The order of the columns does not matter with `append = TRUE`. test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbWriteTable(con, table_name, test_in[c(2, 3, 1)], append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, # write_table_value_shuffle_subset = function(ctx, con, table_name) { test_in <- trivial_df(4, letters[1:4]) dbCreateTable(con, table_name, test_in) dbWriteTable(con, table_name, test_in[c(4, 1, 3)], append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) test_in[2] <- NA_real_ expect_equal_df(test_out, test_in) }, #' overwrite_table = function(ctx, con, table_name) { #' If the `overwrite` argument is `TRUE`, an existing table of the same name #' will be overwritten. penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_error( dbWriteTable(con, table_name, penguins[1, ], overwrite = TRUE), NA ) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[1, ]) }, overwrite_table_missing = function(ctx, con, table_name) { #' This argument doesn't change behavior if the table does not exist yet. 
penguins_in <- get_penguins(ctx) expect_error( dbWriteTable(con, table_name, penguins_in[1, ], overwrite = TRUE), NA ) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins_in[1, ]) }, #' append_table = function(ctx, con, table_name) { #' If the `append` argument is `TRUE`, the rows in an existing table are #' preserved, and the new data are appended. penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_error(dbWriteTable(con, table_name, penguins[1, ], append = TRUE), NA) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, rbind(penguins, penguins[1, ])) }, append_table_new = function(ctx, con, table_name) { #' If the table doesn't exist yet, it is created. penguins <- get_penguins(ctx) expect_error(dbWriteTable(con, table_name, penguins[1, ], append = TRUE), NA) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[1, ]) }, #' temporary_table_1 = function(ctx, con, table_name = "dbit08") { #' If the `temporary` argument is `TRUE`, the table is not available in a #' second connection and is gone after reconnecting. #' Not all backends support this argument. 
if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins, temporary = TRUE) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins) con2 <- local_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, # second stage temporary_table_2 = function(ctx, con) { if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit08" expect_error(dbReadTable(con, table_name)) }, table_visible_in_other_connection_1 = function(ctx, local_con) { #' A regular, non-temporary table is visible in a second connection, penguins30 <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit09" dbWriteTable(local_con, table_name, penguins30) penguins_out <- check_df(dbReadTable(local_con, table_name)) expect_equal_df(penguins_out, penguins30) con2 <- local_connection(ctx) expect_equal_df(dbReadTable(con2, table_name), penguins30) }, # second stage table_visible_in_other_connection_2 = function(ctx, con) { #' in a pre-existing connection, penguins30 <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit09" expect_equal_df(check_df(dbReadTable(con, table_name)), penguins30) }, # third stage table_visible_in_other_connection_3 = function(ctx, local_con, table_name = "dbit09") { #' and after reconnecting to the database. penguins30 <- get_penguins(ctx) expect_equal_df(check_df(dbReadTable(local_con, table_name)), penguins30) }, #' roundtrip_keywords = function(ctx, con) { #' SQL keywords can be used freely in table names, column names, and data. 
tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in, name = "exists") }, roundtrip_quotes = function(ctx, con, table_name) { #' Quotes, commas, spaces, and other special characters such as newlines and tabs, #' can also be used in the data, tbl_in <- data.frame( as.character(dbQuoteString(con, "")), as.character(dbQuoteIdentifier(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb", stringsAsFactors = FALSE ) names(tbl_in) <- letters[seq_along(tbl_in)] test_table_roundtrip(con, tbl_in) }, roundtrip_quotes_table_names = function(ctx, con) { #' and, if the database supports non-syntactic identifiers, #' also for table names if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb" ) tbl_in <- trivial_df() for (table_name in table_names) { test_table_roundtrip_one(con, tbl_in, .add_na = "none") } }, roundtrip_quotes_column_names = function(ctx, con) { #' and column names. 
skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } column_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\nb", "a\\tb", "a\\rb", "a\\bb", "a\\zb" ) tbl_in <- trivial_df(length(column_names), column_names) test_table_roundtrip_one(con, tbl_in, .add_na = "none") }, #' roundtrip_integer = function(ctx, con) { #' The following data types must be supported at least, #' and be read identically with [dbReadTable()]: #' - integer tbl_in <- data.frame(a = c(1:5)) test_table_roundtrip(con, tbl_in) }, roundtrip_numeric = function(ctx, con) { #' - numeric tbl_in <- data.frame(a = c(seq(1, 3, by = 0.5))) test_table_roundtrip(con, tbl_in) #' (the behavior for `Inf` and `NaN` is not specified) }, roundtrip_logical = function(ctx, con) { #' - logical tbl_in <- data.frame(a = c(TRUE, FALSE, NA)) tbl_exp <- tbl_in tbl_exp$a <- ctx$tweaks$logical_return(tbl_exp$a) test_table_roundtrip(con, tbl_in, tbl_exp) }, roundtrip_null = function(ctx, con) { #' - `NA` as NULL tbl_in <- data.frame(a = NA) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { tbl_out$a <- as.logical(tbl_out$a) # Plain NA is of type logical tbl_out } ) }, #' - 64-bit values (using `"bigint"` as field type); the result can be roundtrip_64_bit_numeric = function(ctx, con) { tbl_in <- data.frame(a = c(-1e14, 1e15)) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { #' - converted to a numeric, which may lose precision, tbl_out$a <- as.numeric(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # roundtrip_64_bit_character = function(ctx, con) { tbl_in <- data.frame(a = c(-1e14, 1e15)) tbl_exp <- tbl_in tbl_exp$a <- format(tbl_exp$a, scientific = FALSE) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' - converted a character vector, which gives the full decimal #' representation tbl_out$a <- 
as.character(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # roundtrip_64_bit_roundtrip = function(con, table_name) { tbl_in <- data.frame(a = c(-1e14, 1e15)) dbWriteTable(con, table_name, tbl_in, field.types = c(a = "BIGINT")) tbl_out <- dbReadTable(con, table_name) #' - written to another table and read again unchanged test_table_roundtrip(con, tbl_out, tbl_expected = tbl_out) }, roundtrip_character = function(ctx, con) { #' - character (in both UTF-8 tbl_in <- data.frame( id = seq_along(get_texts()), a = get_texts(), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, roundtrip_character_native = function(ctx, con) { #' and native encodings), tbl_in <- data.frame( a = c(enc2native(get_texts())), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, roundtrip_character_empty = function(ctx, con) { #' supporting empty strings tbl_in <- data.frame( a = c("", "a"), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, roundtrip_character_empty_after = function(ctx, con) { #' before and after a non-empty string tbl_in <- data.frame( a = c("a", ""), stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in) }, roundtrip_factor = function(ctx, con) { #' - factor (returned as character) tbl_in <- data.frame( a = factor(get_texts()) ) tbl_exp <- tbl_in tbl_exp$a <- as.character(tbl_exp$a) test_table_roundtrip(con, tbl_in, tbl_exp) }, roundtrip_raw = function(ctx, con) { #' - list of raw #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = I(list(as.raw(0:10)))) tbl_exp <- tbl_in tbl_exp$a <- blob::as_blob(unclass(tbl_in$a)) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, roundtrip_blob = function(ctx, con) { #' - objects of type [blob::blob] #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- 
data.frame(id = 1L, a = blob::blob(as.raw(0:10))) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, roundtrip_date = function(ctx, con) { #' - date #' (if supported by the database; if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } #' returned as `Date`), tbl_in <- data.frame(a = as_numeric_date(c(Sys.Date() + 1:5))) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, roundtrip_date_extended = function(ctx, con) { #' also for dates prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } tbl_in <- data.frame(a = as_numeric_date(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" ))) test_table_roundtrip( con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, roundtrip_time = function(ctx, con) { #' - time #' (if supported by the database; if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } tbl_in <- data.frame(a = hms::hms(minutes = 1:5)) tbl_in$b <- .difftime(as.numeric(tbl_in$a) / 60, "mins") tbl_exp <- tbl_in tbl_exp$a <- hms::as_hms(tbl_exp$a) tbl_exp$b <- hms::as_hms(tbl_exp$b) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' returned as objects that inherit from `difftime`) expect_s3_class(tbl_out$a, "difftime") expect_s3_class(tbl_out$b, "difftime") tbl_out$a <- hms::as_hms(tbl_out$a) tbl_out$b <- hms::as_hms(tbl_out$b) tbl_out } ) }, roundtrip_timestamp = function(ctx, con) { #' - timestamp #' (if supported by the database; if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } #' returned as `POSIXct` local <- round(Sys.time()) + c( 1, 60, 3600, 86400, 86400 * 90, 86400 * 180, 86400 * 270, 1e9, 5e9 ) attr(local, "tzone") <- "" tbl_in <- data.frame(id = 
seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone), test_table_roundtrip( con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, roundtrip_timestamp_extended = function(ctx, con) { #' also for timestamps prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } local <- as.POSIXct(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" )) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone) test_table_roundtrip( con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' roundtrip_mixed = function(ctx, con) { #' Mixing column types in the same table is supported. data <- list("a", 1L, 1.5) data <- map(data, c, NA) expanded <- expand.grid(a = data, b = data, c = data) tbl_in_list <- map( seq_len(nrow(expanded)), function(i) { as.data.frame(map(expanded[i, ], unlist, recursive = FALSE)) } ) map(tbl_in_list, test_table_roundtrip, con = con) }, #' roundtrip_field_types = function(ctx, con) { #' The `field.types` argument must be a named character vector with at most #' one entry for each column. 
#' It indicates the SQL data type to be used for a new column. tbl_in <- data.frame(a = numeric(), b = character()) #' If a column is missed from `field.types`, the type is inferred #' from the input data with [dbDataType()]. tbl_exp <- data.frame(a = integer(), b = character()) test_table_roundtrip( con, tbl_in, tbl_exp, field.types = c(a = "INTEGER") ) tbl_in <- data.frame(a = numeric(), b = integer()) tbl_exp <- data.frame(a = integer(), b = numeric()) test_table_roundtrip( con, tbl_in, tbl_exp, field.types = c(b = "REAL", a = "INTEGER") ) }, #' write_table_row_names_false = function(ctx, con) { #' The interpretation of [rownames] depends on the `row.names` argument, #' see [sqlRownamesToColumn()] for details: #' - If `FALSE` or `NULL`, row names are ignored. for (row.names in list(FALSE, NULL)) { table_name <- random_table_name() local_remove_test_table(con, table_name) mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) } }, # write_table_row_names_true_exists = function(ctx, con, table_name) { #' - If `TRUE`, row names are converted to a column named "row_names", row.names <- TRUE mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) }, # write_table_row_names_true_missing = function(ctx, con, table_name) { #' even if the input data frame only has natural row names from 1 to `nrow(...)`. 
row.names <- TRUE penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = row.names) penguins_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("row_names" %in% names(penguins_out)) expect_true(all(rownames(penguins_in) %in% penguins_out$row_names)) expect_true(all(penguins_out$row_names %in% rownames(penguins_in))) expect_equal_df(penguins_out[names(penguins_out) != "row_names"], penguins_in) }, # write_table_row_names_na_exists = function(ctx, con, table_name) { #' - If `NA`, a column named "row_names" is created if the data has custom row names, row.names <- NA mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) }, # write_table_row_names_na_missing = function(ctx, con, table_name) { #' no extra column is created in the case of natural row names. 
row.names <- NA penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = row.names) penguins_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_equal_df(penguins_out, penguins_in) }, # write_table_row_names_string_exists = function(ctx, con, table_name) { row.names <- "make_model" #' - If a string, this specifies the name of the column in the remote table #' that contains the row names, mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = row.names) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("make_model" %in% names(mtcars_out)) expect_true(all(mtcars_out$make_model %in% rownames(mtcars_in))) expect_true(all(rownames(mtcars_in) %in% mtcars_out$make_model)) expect_equal_df(mtcars_out[names(mtcars_out) != "make_model"], unrowname(mtcars_in)) }, # write_table_row_names_string_missing = function(ctx, con, table_name) { row.names <- "seq" #' even if the input data frame only has natural row names. penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = row.names) penguins_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_true("seq" %in% names(penguins_out)) expect_true(all(penguins_out$seq %in% rownames(penguins_in))) expect_true(all(rownames(penguins_in) %in% penguins_out$seq)) expect_equal_df(penguins_out[names(penguins_out) != "seq"], penguins_in) }, # #' write_table_row_names_default = function(ctx, con, table_name) { #' The default is `row.names = FALSE`. mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) }, # NULL ) test_table_roundtrip <- function(...) 
{ test_table_roundtrip_one(..., .add_na = "none") test_table_roundtrip_one(..., .add_na = "above") test_table_roundtrip_one(..., .add_na = "below") } test_table_roundtrip_one <- function( con, tbl_in, tbl_expected = tbl_in, transform = identity, name = NULL, field.types = NULL, use_append = FALSE, .add_na = "none") { force(tbl_expected) if (.add_na == "above") { tbl_in <- add_na_above(tbl_in) tbl_expected <- add_na_above(tbl_expected) } else if (.add_na == "below") { tbl_in <- add_na_below(tbl_in) tbl_expected <- add_na_below(tbl_expected) } if (is.null(name)) { name <- random_table_name() } local_remove_test_table(con, name = name) if (use_append) { dbCreateTable(con, name, field.types %||% tbl_in) dbAppendTable(con, name, tbl_in) } else { dbWriteTable(con, name, tbl_in, field.types = field.types) } tbl_read <- check_df(dbReadTable(con, name, check.names = FALSE)) tbl_out <- transform(tbl_read) expect_equal_df(tbl_out, tbl_expected) } add_na_above <- function(tbl) { idx <- c(NA, seq_len(nrow(tbl))) tbl <- tbl[idx, , drop = FALSE] unrowname(tbl) } add_na_below <- function(tbl) { idx <- c(seq_len(nrow(tbl)), NA) tbl <- tbl[idx, , drop = FALSE] unrowname(tbl) } DBItest/R/spec-arrow.R0000644000176200001440000000137114602110354014154 0ustar liggesusers#' @format NULL spec_arrow <- c( spec_arrow_send_query_arrow, spec_arrow_fetch_arrow, spec_arrow_fetch_arrow_chunk, spec_arrow_get_query_arrow, spec_arrow_read_table_arrow, spec_arrow_write_table_arrow, spec_arrow_create_table_arrow, spec_arrow_append_table_arrow, spec_arrow_bind, spec_arrow_roundtrip, # NULL ) utils::globalVariables("select") stream_frame <- function(..., .select = NULL) { data <- data.frame(..., stringsAsFactors = FALSE, check.names = FALSE) as_is <- map_lgl(data, inherits, "AsIs") data[as_is] <- map(data[as_is], function(.x) { class(.x) <- setdiff(class(.x), "AsIs") .x }) if (!is.null(.select)) { data <- data[.select] } out <- nanoarrow::as_nanoarrow_array_stream(data) out } 
DBItest/R/dummy.R0000644000176200001440000000011014602017371013220 0ustar liggesusersdummy <- function() { # Satisfy R CMD check desc::desc_get_deps() } DBItest/R/spec-stress-connection.R0000644000176200001440000000136514602061640016510 0ustar liggesusers#' @format NULL #' @importFrom withr with_output_sink #' @section Connection: #' \subsection{Stress tests}{ spec_stress_connection <- list( simultaneous_connections = function(ctx) { #' Open 50 simultaneous connections cons <- list() on.exit(try_silent(map(cons, dbDisconnect)), add = TRUE) for (i in seq_len(50L)) { cons <- c(cons, connect(ctx)) } inherit_from_connection <- map_lgl(cons, is, class2 = "DBIConnection") expect_true(all(inherit_from_connection)) }, stress_connections = function(ctx) { #' Open and close 50 connections for (i in seq_len(50L)) { con <- connect(ctx) expect_s4_class(con, "DBIConnection") expect_error(dbDisconnect(con), NA) } }, #' } NULL ) DBItest/R/spec-sql-read-table.R0000644000176200001440000002250614602020561015622 0ustar liggesusers#' spec_sql_read_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_read_table <- list( read_table_formals = function() { # expect_equal(names(formals(dbReadTable)), c("conn", "name", "...")) }, read_table = function(ctx, con, table_name) { #' @return #' `dbReadTable()` returns a data frame that contains the complete data #' from the remote table, effectively the result of calling [dbGetQuery()] with #' `SELECT * FROM `. penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins_in) }, #' read_table_missing = function(con, table_name) { #' @section Failure modes: #' An error is raised if the table does not exist. 
stopifnot(!dbExistsTable(con, table_name)) expect_error(dbReadTable(con, table_name)) }, read_table_empty = function(ctx, con, table_name) { #' @return #' An empty table is returned as a data frame with zero rows. penguins_in <- get_penguins(ctx)[integer(), ] dbWriteTable(con, table_name, penguins_in) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal(nrow(penguins_out), 0L) expect_equal_df(penguins_out, penguins_in) }, #' read_table_row_names_false = function(con) { #' The presence of [rownames] depends on the `row.names` argument, #' see [sqlColumnToRownames()] for details: #' - If `FALSE` or `NULL`, the returned data frame doesn't have row names. for (row.names in list(FALSE, NULL)) { table_name <- random_table_name() local_remove_test_table(con, table_name) mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = TRUE) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) } }, # read_table_row_names_true_exists = function(con, table_name) { #' - If `TRUE`, a column named "row_names" is converted to row names. 
row.names <- TRUE mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = NA) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_equal_df(mtcars_out, mtcars_in) }, #' read_table_row_names_true_missing = function(ctx, con, table_name) { #' @section Failure modes: #' An error is raised if `row.names` is `TRUE` and no "row_names" column exists, row.names <- TRUE penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = NA) expect_error(dbReadTable(con, table_name, row.names = row.names)) }, # read_table_row_names_na_exists = function(con, table_name) { #' @return #' - If `NA`, a column named "row_names" is converted to row names if it exists, row.names <- NA mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = TRUE) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_equal_df(mtcars_out, mtcars_in) }, # read_table_row_names_na_missing = function(ctx, con, table_name) { #' otherwise no translation occurs. row.names <- NA penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = FALSE) penguins_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_equal_df(penguins_out, penguins_in) }, # read_table_row_names_string_exists = function(con, table_name) { #' - If a string, this specifies the name of the column in the remote table #' that contains the row names. 
row.names <- "make_model" mtcars_in <- datasets::mtcars mtcars_in$make_model <- rownames(mtcars_in) mtcars_in <- unrowname(mtcars_in) dbWriteTable(con, table_name, mtcars_in, row.names = FALSE) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = row.names)) expect_false("make_model" %in% names(mtcars_out)) expect_true(all(mtcars_in$make_model %in% rownames(mtcars_out))) expect_true(all(rownames(mtcars_out) %in% mtcars_in$make_model)) expect_equal_df(unrowname(mtcars_out), mtcars_in[names(mtcars_in) != "make_model"]) }, #' read_table_row_names_string_missing = function(ctx, con, table_name) { #' @section Failure modes: #' An error is raised if `row.names` is set to a string and no corresponding column exists. row.names <- "missing" penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in, row.names = FALSE) expect_error(dbReadTable(con, table_name, row.names = row.names)) }, read_table_row_names_default = function(con, table_name) { #' @return #' The default is `row.names = FALSE`. 
#' mtcars_in <- datasets::mtcars dbWriteTable(con, table_name, mtcars_in, row.names = TRUE) mtcars_out <- check_df(dbReadTable(con, table_name)) expect_true("row_names" %in% names(mtcars_out)) expect_true(all(mtcars_out$row_names %in% rownames(mtcars_in))) expect_true(all(rownames(mtcars_in) %in% mtcars_out$row_names)) expect_equal_df(mtcars_out[names(mtcars_out) != "row_names"], unrowname(mtcars_in)) }, # #' read_table_check_names = function(ctx, con, table_name) { #' If the database supports identifiers with special characters, if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } #' the columns in the returned data frame are converted to valid R #' identifiers test_in <- data.frame(a = 1:3, b = 4:6) names(test_in) <- c("with spaces", "with,comma") dbWriteTable(con, table_name, test_in) #' if the `check.names` argument is `TRUE`, test_out <- check_df(dbReadTable(con, table_name, check.names = TRUE)) expect_identical(names(test_out), make.names(names(test_out), unique = TRUE)) expect_equal_df(test_out, setNames(test_in, names(test_out))) }, # read_table_check_names_false = function(ctx, con, table_name) { if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } #' If `check.names = FALSE`, the returned table has non-syntactic column names without quotes. test_in <- data.frame(a = 1:3, b = 4:6) names(test_in) <- c("with spaces", "with,comma") dbWriteTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, table_name, check.names = FALSE)) expect_equal_df(test_out, test_in) }, #' read_table_closed_connection = function(ctx, con, table_name) { #' @section Failure modes: #' An error is raised when calling this method for a closed dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_closed_connection(ctx = ctx) expect_error(dbReadTable(con2, table_name)) }, read_table_invalid_connection = function(ctx, con, table_name) { #' or invalid connection. 
dbWriteTable(con, table_name, data.frame(a = 1)) con2 <- local_invalid_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, read_table_error = function(ctx, con, table_name) { #' An error is raised dbWriteTable(con, table_name, data.frame(a = 1L)) #' if `name` cannot be processed with [dbQuoteIdentifier()] expect_error(dbReadTable(con, NA)) #' or if this results in a non-scalar. expect_error(dbReadTable(con, c(table_name, table_name))) #' Unsupported values for `row.names` and `check.names` #' (non-scalars, expect_error(dbReadTable(con, table_name, row.names = letters)) #' unsupported data types, expect_error(dbReadTable(con, table_name, row.names = list(1L))) expect_error(dbReadTable(con, table_name, check.names = 1L)) #' `NA` for `check.names`) expect_error(dbReadTable(con, table_name, check.names = NA)) #' also raise an error. }, #' @section Additional arguments: #' The following arguments are not part of the `dbReadTable()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `row.names` (default: `FALSE`) #' - `check.names` #' #' They must be provided as named arguments. #' See the "Value" section for details on their usage. 
read_table_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) #' - If an unquoted table name as string: `dbReadTable()` will do the #' quoting, test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) } }, # NULL ) DBItest/R/spec-sql-quote-identifier.R0000644000176200001440000001360214602017371017101 0ustar liggesusers#' spec_sql_quote_identifier #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_quote_identifier <- list( quote_identifier_formals = function() { # expect_equal(names(formals(dbQuoteIdentifier)), c("conn", "x", "...")) }, quote_identifier_return = function(con) { #' @return #' `dbQuoteIdentifier()` returns an object that can be coerced to [character], simple_out <- dbQuoteIdentifier(con, "simple") expect_error(as.character(simple_out), NA) expect_type(as.character(simple_out), "character") }, # quote_identifier_vectorized = function(ctx, con) { #' of the same length as the input. simple <- "simple" simple_out <- dbQuoteIdentifier(con, simple) expect_equal(length(simple_out), 1L) letters_out <- dbQuoteIdentifier(con, letters) expect_equal(length(letters_out), length(letters)) #' For an empty character vector this function returns a length-0 object. 
empty <- character() empty_out <- dbQuoteIdentifier(con, empty) expect_equal(length(empty_out), 0L) #' The names of the input argument are preserved in the output. unnamed <- letters unnamed_out <- dbQuoteIdentifier(con, unnamed) expect_null(names(unnamed_out)) named <- stats::setNames(LETTERS[1:3], letters[1:3]) named_out <- dbQuoteIdentifier(con, named) expect_equal(names(named_out), letters[1:3]) #' When passing the returned object again to `dbQuoteIdentifier()` #' as `x` #' argument, it is returned unchanged. expect_identical(dbQuoteIdentifier(con, simple_out), simple_out) expect_identical(dbQuoteIdentifier(con, letters_out), letters_out) expect_identical(dbQuoteIdentifier(con, empty_out), empty_out) #' Passing objects of class [SQL] should also return them unchanged. expect_identical(dbQuoteIdentifier(con, SQL(simple)), SQL(simple)) expect_identical(dbQuoteIdentifier(con, SQL(letters)), SQL(letters)) expect_identical(dbQuoteIdentifier(con, SQL(empty)), SQL(empty)) #' (For backends it may be most convenient to return [SQL] objects #' to achieve this behavior, but this is not required.) }, #' quote_identifier_error = function(ctx, con) { #' @section Failure modes: #' #' An error is raised if the input contains `NA`, expect_error(dbQuoteIdentifier(con, NA)) expect_error(dbQuoteIdentifier(con, NA_character_)) expect_error(dbQuoteIdentifier(con, c("a", NA_character_))) #' but not for an empty string. expect_error(dbQuoteIdentifier(con, ""), NA) }, quote_identifier = function(ctx, con) { #' @section Specification: #' Calling [dbGetQuery()] for a query of the format `SELECT 1 AS ...` #' returns a data frame with the identifier, unquoted, as column name. 
#' Quoted identifiers can be used as table and column names in SQL queries, simple <- dbQuoteIdentifier(con, "simple") #' in particular in queries like `SELECT 1 AS ...` query <- trivial_query(column = simple) rows <- check_df(dbGetQuery(con, query)) expect_identical(names(rows), "simple") expect_identical(unlist(unname(rows)), 1.5) #' and `SELECT * FROM (SELECT 1) ...`. query <- paste0("SELECT * FROM (", trivial_query(), ") ", simple) rows <- check_df(dbGetQuery(con, query)) expect_identical(unlist(unname(rows)), 1.5) }, quote_identifier_string = function(ctx, con) { #' The method must use a quoting mechanism that is unambiguously different #' from the quoting mechanism used for strings, so that a query like #' `SELECT ... FROM (SELECT 1 AS ...)` query <- paste0( "SELECT ", dbQuoteIdentifier(con, "b"), " FROM (", "SELECT 1 AS ", dbQuoteIdentifier(con, "a"), ")" ) #' throws an error if the column names do not match. eval(bquote(expect_error(dbGetQuery(con, .(query))))) }, # #' quote_identifier_special = function(ctx, con) { #' The method can quote column names that #' contain special characters such as a space, with_space_in <- "with space" with_space <- dbQuoteIdentifier(con, with_space_in) #' a dot, with_dot_in <- "with.dot" with_dot <- dbQuoteIdentifier(con, with_dot_in) #' a comma, with_comma_in <- "with,comma" with_comma <- dbQuoteIdentifier(con, with_comma_in) #' or quotes used to mark strings with_quote_in <- as.character(dbQuoteString(con, "a")) with_quote <- dbQuoteIdentifier(con, with_quote_in) #' or identifiers, empty_in <- "" empty <- dbQuoteIdentifier(con, empty_in) quoted_empty <- dbQuoteIdentifier(con, as.character(empty)) quoted_with_space <- dbQuoteIdentifier(con, as.character(with_space)) quoted_with_dot <- dbQuoteIdentifier(con, as.character(with_dot)) quoted_with_comma <- dbQuoteIdentifier(con, as.character(with_comma)) quoted_with_quote <- dbQuoteIdentifier(con, as.character(with_quote)) #' if the database supports this. 
if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } #' In any case, checking the validity of the identifier #' should be performed only when executing a query, #' and not by `dbQuoteIdentifier()`. query <- paste0( "SELECT ", "2.5 as", with_space, ",", "3.5 as", with_dot, ",", "4.5 as", with_comma, ",", "5.5 as", with_quote, ",", "6.5 as", quoted_empty, ",", "7.5 as", quoted_with_space, ",", "8.5 as", quoted_with_dot, ",", "9.5 as", quoted_with_comma, ",", "10.5 as", quoted_with_quote ) rows <- check_df(dbGetQuery(con, query)) expect_identical( names(rows), c( with_space_in, with_dot_in, with_comma_in, with_quote_in, as.character(empty), as.character(with_space), as.character(with_dot), as.character(with_comma), as.character(with_quote) ) ) expect_identical(unlist(unname(rows)), 2:10 + 0.5) }, # NULL ) DBItest/R/spec-meta-bind-formals.R0000644000176200001440000000110614602017371016324 0ustar liggesusers#' spec_meta_bind #' @name spec_meta_bind #' @family meta specifications #' @aliases NULL #' @usage NULL #' @format NULL #' @keywords NULL spec_meta_bind_formals <- list( bind_formals = function() { # expect_equal(names(formals(dbBind)), c("res", "params", "...")) }, #' bind_empty = function(con) { #' @section Failure modes: #' Calling `dbBind()` for a query without parameters res <- local_result(dbSendQuery(con, trivial_query())) #' raises an error. 
expect_error(dbBind(res, list())) }, NULL ) DBItest/R/spec-meta-bind-arrow-stream.R0000644000176200001440000011767514725054151017332 0ustar liggesusers# Generated by helper-dev.R, do not edit by hand # Sources: R/spec-meta-bind-.R, R/spec-meta-bind-expr.R, R/spec-meta-bind-runner.R # This file is generated during load_all() if it's older than the sources spec_meta_arrow_stream_bind <- list( arrow_stream_bind_return_value = function(ctx, con) { # # @return # `dbBind()` returns the result set, # invisibly, # for queries issued by [dbSendQuery()] or [dbSendQueryArrow()] and placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbFetch(res)) expect_equal(dbGetRowCount(res), 0) expect_true(dbIsValid(res)) expect_false(dbHasCompleted(res)) bind_res <- withVisible(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values))) expect_identical(res, bind_res$value) expect_false(bind_res$visible) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_too_many = function(ctx, con) { # # @section Failure modes: # Binding too many placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- 
placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- if (is.null(names(bind_values))) { c(bind_values, bind_values[[1L]]) } else { c(bind_values, bogus = bind_values[[1L]]) } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_not_enough = function(ctx, con) { # # or not enough values, placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[-1L] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_wrong_name = function(ctx, con) { # # or parameters with wrong names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- 
structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, paste0("bogus", names(bind_values))) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_named_param_unnamed_placeholders = function(ctx, con) { # # If the placeholders in the query are named, # all parameter values must have names placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, NULL) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_named_param_empty_placeholders = function(ctx, con) { # # (which must not be empty placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for 
(placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) bind_values_patched <- { names(bind_values)[[1]] <- "" bind_values } placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_unnamed_param_named_placeholders = function(ctx, con) { # # and vice versa, # otherwise an error is raised. placeholder_funs <- get_placeholder_funs(ctx, requires_names = FALSE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) bind_values_patched <- stats::setNames(bind_values, letters[seq_along(bind_values)]) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)), ".*") expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_premature_clear = function(ctx, con) { # # Calling `dbBind()` on a result set already 
cleared by [dbClearResult()] # also raises an error. placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) dbClearResult(res) expect_error(dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)), ".*") } }, arrow_stream_bind_multi_row = function(ctx, con) { # # @section Specification: # The elements of the `params` argument do not need to be scalars, # vectors of arbitrary length placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1:3, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 3L) result <- data.frame(a = c(1.5, 2.5, 2.5)) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_multi_row_zero_length = function(ctx, con) { # # (including length 0) # are supported. 
# For queries, calling `dbFetch()` binding such parameters returns # concatenated results, equivalent to binding and fetching for each set # of values and connecting via [rbind()]. skip_if_not_dbitest(ctx, "1.7.99.12") placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(integer(0), integer(0), check.names = FALSE), names = c("", "")) placeholder <- placeholder_fun(2L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 0L) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_repeated = function(ctx, con) { # # `dbBind()` also accepts repeated calls on the same result set # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, 
nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_repeated_untouched = function(ctx, con) { # # even if no results are fetched between calls to `dbBind()`, # for both queries placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, check.names = FALSE), names = "") placeholder <- placeholder_fun(1L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_named_param_shuffle = function(ctx, con) { # # If the placeholders in the query are named, # their order in the `params` argument is not important. 
placeholder_funs <- get_placeholder_funs(ctx, requires_names = TRUE) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1.5, 2.5, 3.5, NA_real_, check.names = FALSE), names = character(4)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) bind_values_patched <- bind_values[c(3, 1, 2, 4)] placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values_patched)) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_integer = function(ctx, con) { # # At least the following data types are accepted on input (including [NA]): # - [integer] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1L, 2L, 3L, NA_integer_, check.names = FALSE), names = character(4)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- 
paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_numeric = function(ctx, con) { # # - [numeric] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(1.5, 2.5, 3.5, NA_real_, check.names = FALSE), names = character(4)) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_logical = function(ctx, con) { # # - 
[logical] for Boolean values placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure(data.frame(TRUE, FALSE, NA, check.names = FALSE), names = character(3)) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[3L] <- paste0("(", is_null_check(placeholder[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_character = function(ctx, con) { # # - [character] placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}", "M\U{FC}ller", `Encoding<-`("M\xfcller", "latin1"), "\U{6211}\U{662F}\U{8C01}", "ASCII", NA_character_, check.names = FALSE), names = character(6) ) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], 
" THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_character_escape = function(ctx, con) { # # (also with special characters such as spaces, newlines, quotes, and backslashes) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(" ", "\n", "\r", "\b", "'", '"', "[", "]", "\\", NA_character_, check.names = FALSE), names = character(10) ) placeholder <- placeholder_fun(10L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[10L] <- paste0("(", is_null_check(placeholder[10L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 
ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f, ") sql <- paste0(sql, "CASE WHEN ", result_check[[7L]], " THEN 1.5 ELSE 2.5 END AS g, ") sql <- paste0(sql, "CASE WHEN ", result_check[[8L]], " THEN 1.5 ELSE 2.5 END AS h, ") sql <- paste0(sql, "CASE WHEN ", result_check[[9L]], " THEN 1.5 ELSE 2.5 END AS i, ") sql <- paste0(sql, "CASE WHEN ", result_check[[10L]], " THEN 1.5 ELSE 2.5 END AS j") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5, g = 1.5, h = 1.5, i = 1.5, j = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_factor = function(ctx, con) { # # - [factor] (bound as character, # with warning) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(factor("\U{41A}\U{438}\U{440}\U{438}\U{43B}\U{43B}"), factor("M\U{FC}ller"), factor(`Encoding<-`("M\xfcller", "latin1")), factor("\U{6211}\U{662F}\U{8C01}"), factor("ASCII"), factor(NA_character_), check.names = FALSE), names = character(6) ) placeholder <- placeholder_fun(6L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[6L] <- paste0("(", is_null_check(placeholder[6L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", 
result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d, ") sql <- paste0(sql, "CASE WHEN ", result_check[[5L]], " THEN 1.5 ELSE 2.5 END AS e, ") sql <- paste0(sql, "CASE WHEN ", result_check[[6L]], " THEN 1.5 ELSE 2.5 END AS f") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5, e = 1.5, f = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_date = function(ctx, con) { # # - [Date] skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.Date("2023-12-17"), as.Date("2023-12-18"), as.Date("2023-12-19"), as.Date(NA), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) 
expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_date_integer = function(ctx, con) { # # (also when stored internally as integer) skip_if(!isTRUE(ctx$tweaks$date_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(structure(18618L, class = "Date"), structure(18619L, class = "Date"), structure(18620L, class = "Date"), structure(NA_integer_, class = "Date"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_timestamp = function(ctx, con) { # # - [POSIXct] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.POSIXct("2023-12-17 02:40:22"), as.POSIXct("2023-12-17 02:40:23"), as.POSIXct("2023-12-17 02:40:24"), 
as.POSIXct(NA_character_), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_timestamp_lt = function(ctx, con) { # # - [POSIXlt] timestamps skip_if(!isTRUE(ctx$tweaks$timestamp_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.POSIXct("2023-12-17 02:40:49"), as.POSIXct("2023-12-17 02:40:50"), as.POSIXct("2023-12-17 02:40:51"), as.POSIXct(NA_character_), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, 
"CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_time_seconds = function(ctx, con) { # # - [difftime] values skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.difftime(1, units = "secs"), as.difftime(2, units = "secs"), as.difftime(3, units = "secs"), as.difftime(NA_real_, units = "secs"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, 
c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_time_hours = function(ctx, con) { # # (also with units other than seconds skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.difftime(1, units = "hours"), as.difftime(2, units = "hours"), as.difftime(3, units = "hours"), as.difftime(NA_real_, units = "hours"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_time_minutes_integer = function(ctx, con) { # # and with the value stored as integer) skip_if(!isTRUE(ctx$tweaks$time_typed)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check for (placeholder_fun in placeholder_funs) { bind_values <- structure( data.frame(as.difftime(1, units = "mins"), as.difftime(2, units = "mins"), 
as.difftime(3, units = "mins"), as.difftime(NA_real_, units = "mins"), check.names = FALSE), names = character(4) ) placeholder <- placeholder_fun(4L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", placeholder, " = ", placeholder_values, ")") result_check[4L] <- paste0("(", is_null_check(placeholder[4L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c, ") sql <- paste0(sql, "CASE WHEN ", result_check[[4L]], " THEN 1.5 ELSE 2.5 END AS d") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5, d = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, arrow_stream_bind_blob = function(ctx, con) { # # - objects of type [blob::blob] skip_if(isTRUE(ctx$tweaks$omit_blob_tests)) placeholder_funs <- get_placeholder_funs(ctx) is_null_check <- ctx$tweaks$is_null_check cast_fun <- ctx$tweaks$blob_cast for (placeholder_fun in placeholder_funs) { bind_values <- structure( list( blob::blob(as.raw(c(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))), blob::blob(raw(3)), structure(vctrs::list_of(NULL, .ptype = raw(0)), class = c("blob", "vctrs_list_of", "vctrs_vctr", "list")) ), names = character(3), class = "data.frame", row.names = c(NA, -1L) ) placeholder <- placeholder_fun(3L) names(bind_values) <- names(placeholder) placeholder_values <- map_chr(bind_values, function(x) DBI::dbQuoteLiteral(con, x[1])) result_check <- paste0("(", cast_fun(placeholder), " = ", placeholder_values, ")") result_check[3L] <- paste0("(", 
is_null_check(cast_fun(placeholder)[3L]), ")") sql <- "SELECT " sql <- paste0(sql, "CASE WHEN ", result_check[[1L]], " THEN 1.5 ELSE 2.5 END AS a, ") sql <- paste0(sql, "CASE WHEN ", result_check[[2L]], " THEN 1.5 ELSE 2.5 END AS b, ") sql <- paste0(sql, "CASE WHEN ", result_check[[3L]], " THEN 1.5 ELSE 2.5 END AS c") res <- dbSendQueryArrow(con, sql) on.exit(if (!is.null(res)) expect_error(dbClearResult(res), NA)) dbBindArrow(res, nanoarrow::as_nanoarrow_array_stream(bind_values)) rows <- check_df(dbFetch(res)) expect_equal(nrow(rows), 1L) result <- data.frame(a = 1.5, b = 1.5, c = 1.5) expect_equal(rows, result) expect_error(dbClearResult(res), NA) res <- NULL } }, NULL ) DBItest/R/spec-compliance.R0000644000176200001440000000011614602017371015135 0ustar liggesusers#' @format NULL spec_compliance <- c( spec_compliance_methods, # NULL ) DBItest/R/spec-driver-get-info.R0000644000176200001440000000144714602017371016034 0ustar liggesusers#' spec_driver_get_info #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @name spec_get_info spec_driver_get_info <- list( get_info_driver = function(ctx) { #' @return #' For objects of class [DBIDriver-class], `dbGetInfo()` info <- dbGetInfo(ctx$drv) #' returns a named list expect_type(info, "list") info_names <- names(info) #' that contains at least the following components: #' necessary_names <- c( #' - `driver.version`: the package version of the DBI backend, "driver.version", #' - `client.version`: the version of the DBMS client library. 
"client.version" ) for (name in necessary_names) { eval(bquote( expect_true(.(name) %in% info_names) )) } }, # NULL ) DBItest/R/spec-sql-list-fields.R0000644000176200001440000000642314602017371016046 0ustar liggesusers#' spec_sql_list_fields #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_list_fields <- list( list_fields_formals = function() { # expect_equal(names(formals(dbListFields)), c("conn", "name", "...")) }, list_fields = function(ctx, con, table_name) { #' @return #' `dbListFields()` penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) fields <- dbListFields(con, table_name) #' returns a character vector expect_type(fields, "character") #' that enumerates all fields #' in the table in the correct order. expect_identical(fields, names(penguins)) }, list_fields_temporary = function(ctx, con, table_name) { #' This also works for temporary tables if supported by the database. if (isTRUE(ctx$tweaks$temporary_tables) && isTRUE(ctx$tweaks$list_temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L, b = 2L), temporary = TRUE) fields <- dbListFields(con, table_name) expect_equal(fields, c("a", "b")) #' The returned names are suitable for quoting with `dbQuoteIdentifier()`. expect_equal(dbQuoteIdentifier(con, fields), dbQuoteIdentifier(con, c("a", "b"))) } }, #' list_fields_wrong_table = function(con) { #' @section Failure modes: #' If the table does not exist, an error is raised. name <- "missing" stopifnot(!dbExistsTable(con, name)) expect_error(dbListFields(con, name)) }, list_fields_invalid_type = function(con) { #' Invalid types for the `name` argument #' (e.g., `character` of length not equal to one, expect_error(dbListFields(con, character())) expect_error(dbListFields(con, letters)) #' or numeric) expect_error(dbListFields(con, 1)) #' lead to an error. 
}, list_fields_closed_connection = function(ctx, closed_con) { #' An error is also raised when calling this method for a closed expect_error(dbListFields(closed_con, "test")) }, list_fields_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbListFields(invalid_con, "test")) }, list_fields_quoted = function(con, table_name) { #' @section Specification: #' #' The `name` argument can be #' #' - a string #' - the return value of [dbQuoteIdentifier()] dbWriteTable(con, table_name, data.frame(a = 1L, b = 2L)) expect_identical( dbListFields(con, dbQuoteIdentifier(con, table_name)), c("a", "b") ) }, list_fields_object = function(con, table_name) { #' - a value from the `table` column from the return value of #' [dbListObjects()] where `is_prefix` is `FALSE` dbWriteTable(con, table_name, data.frame(a = 1L, b = 2L)) objects <- dbListObjects(con) expect_gt(nrow(objects), 0) expect_false(all(objects$is_prefix)) expect_identical( dbListFields(con, objects$table[[1]]), dbListFields(con, dbQuoteIdentifier(con, objects$table[[1]])) ) }, #' list_fields_row_names = function(con, table_name) { #' A column named `row_names` is treated like any other column. 
dbWriteTable(con, table_name, data.frame(a = 1L, row_names = 2L)) expect_identical(dbListFields(con, table_name), c("a", "row_names")) }, # NULL ) DBItest/R/spec-all.R0000644000176200001440000000026614602017371013601 0ustar liggesusersspec_all <- c( spec_getting_started, spec_driver, spec_connection, spec_result, spec_sql, spec_meta, spec_transaction, spec_arrow, spec_compliance, spec_stress ) DBItest/R/spec-arrow-fetch-arrow-chunk.R0000644000176200001440000000617414602020561017507 0ustar liggesusers#' spec_arrow_fetch_arrow_chunk #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_fetch_arrow_chunk <- list( arrow_fetch_arrow_chunk_formals = function() { # expect_equal(names(formals(dbFetchArrowChunk)), c("res", "...")) }, arrow_fetch_arrow_chunk_atomic = function(con) { #' @return #' `dbFetchArrowChunk()` always returns an object coercible to a [data.frame] with #' as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value query <- trivial_query() res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrowChunk(res)) expect_equal(rows, data.frame(a = 1.5)) }, arrow_fetch_arrow_chunk_one_row = function(con) { #' or has one query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrowChunk(res)) expect_identical(rows, result) }, arrow_fetch_arrow_chunk_zero_rows = function(con) { #' or zero rows. query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrowChunk(res)) expect_identical(class(rows), "data.frame") }, #' arrow_fetch_arrow_chunk_closed = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.11") #' @section Failure modes: #' An attempt to fetch from a closed result set raises an error. 
query <- trivial_query() res <- dbSendQueryArrow(con, query) dbClearResult(res) expect_error(dbFetchArrowChunk(res)) }, arrow_fetch_arrow_chunk_multi_row_single_column = function(ctx, con) { #' @section Specification: #' Fetching multi-row queries with one query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrowChunk(res)) expect_identical(rows, result) }, arrow_fetch_arrow_chunk_multi_row_multi_column = function(ctx, con) { #' or more columns returns the next chunk. #' The size of the chunk is implementation-specific. query <- sql_union( .ctx = ctx, paste("SELECT", 1:5 + 0.5, "AS a,", 4:0 + 0.5, "AS b"), .order_by = "a" ) res <- local_result(dbSendQueryArrow(con, query)) rows <- check_arrow(dbFetchArrowChunk(res)) expect_identical(rows, data.frame(a = 1:5 + 0.5, b = 4:0 + 0.5)) }, arrow_fetch_arrow_chunk_array = function(ctx, con) { #' The object returned by `dbFetchArrowChunk()` can also be passed to #' [nanoarrow::as_nanoarrow_array()] to create a nanoarrow array object. query <- trivial_query(25, .ctx = ctx, .order_by = "a") result <- trivial_df(25) res <- local_result(dbSendQueryArrow(con, query)) chunk <- dbFetchArrowChunk(res) rbr <- nanoarrow::as_nanoarrow_array(chunk) #' The chunk size is implementation-specific. out <- as.data.frame(rbr) expect_equal(out, head(result, nrow(out))) }, # NULL ) DBItest/R/spec-transaction-with-transaction.R0000644000176200001440000000627514602017371020660 0ustar liggesusers#' spec_transaction_with_transaction #' @family transaction specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_transaction_with_transaction <- list( with_transaction_formals = function() { # expect_equal(names(formals(dbWithTransaction)), c("conn", "code", "...")) }, with_transaction_return_value = function(con) { #' @return #' `dbWithTransaction()` returns the value of the executed code. 
name <- random_table_name() expect_identical(dbWithTransaction(con, name), name) }, #' with_transaction_error_closed = function(ctx, closed_con) { #' @section Failure modes: #' Failure to initiate the transaction #' (e.g., if the connection is closed expect_error(dbWithTransaction(closed_con, NULL)) }, with_transaction_error_invalid = function(ctx, invalid_con) { #' or invalid expect_error(dbWithTransaction(invalid_con, NULL)) }, with_transaction_error_nested = function(con) { #' of if [dbBegin()] has been called already) dbBegin(con) #' gives an error. expect_error(dbWithTransaction(con, NULL)) dbRollback(con) }, with_transaction_success = function(con, table_name) { #' @section Specification: #' `dbWithTransaction()` initiates a transaction with `dbBegin()`, executes #' the code given in the `code` argument, and commits the transaction with #' [dbCommit()]. dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) dbWithTransaction( con, { dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) } ) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0:1)) }, with_transaction_failure = function(con, table_name) { #' If the code raises an error, the transaction is instead aborted with #' [dbRollback()], and the error is propagated. dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) name <- random_table_name() expect_error( dbWithTransaction( con, { dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE) stop(name) } ), name, fixed = TRUE ) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0L)) }, with_transaction_break = function(con, table_name) { #' If the code calls `dbBreak()`, execution of the code stops and the #' transaction is silently aborted. 
dbWriteTable(con, table_name, data.frame(a = 0L), overwrite = TRUE) expect_error( dbWithTransaction( con, { dbWriteTable(con, table_name, data.frame(a = 1L), append = TRUE) dbBreak() } ), NA ) expect_equal(check_df(dbReadTable(con, table_name)), data.frame(a = 0L)) }, with_transaction_side_effects = function(con) { #' All side effects caused by the code expect_false(exists("a", inherits = FALSE)) #' (such as the creation of new variables) dbWithTransaction(con, a <- 42) #' propagate to the calling environment. expect_identical(get0("a", inherits = FALSE), 42) }, # NULL ) DBItest/R/spec-sql-list-objects.R0000644000176200001440000001344114602061640016225 0ustar liggesusers#' spec_sql_list_objects #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_list_objects <- list( list_objects_formals = function() { # expect_equal(names(formals(dbListObjects)), c("conn", "prefix", "...")) }, list_objects_1 = function(ctx, con, table_name = "dbit06") { #' @return #' `dbListObjects()` objects <- dbListObjects(con) #' returns a data frame expect_s3_class(objects, "data.frame") #' with columns cols <- c("table", "is_prefix") #' `table` and `is_prefix` (in that order), expect_equal(names(objects)[seq_along(cols)], cols) #' optionally with other columns with a dot (`.`) prefix. expect_true(all(grepl("^[.]", names(objects)[-seq_along(cols)]))) #' The `table` column is of type list. expect_equal(typeof(objects$table), "list") #' Each object in this list is suitable for use as argument in [dbQuoteIdentifier()]. expect_error(map(objects$table, dbQuoteIdentifier, conn = con), NA) #' The `is_prefix` column is a logical. expect_type(objects$is_prefix, "logical") #' This data frame contains one row for each object (schema, table expect_false(table_name %in% objects) #' and view) # TODO #' accessible from the prefix (if passed) or from the global namespace #' (if prefix is omitted). 
#' Tables added with [dbWriteTable()] are penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) #' part of the data frame. objects <- dbListObjects(con) quoted_tables <- map_chr(objects$table, dbQuoteIdentifier, conn = con) expect_true(dbQuoteIdentifier(con, table_name) %in% quoted_tables) }, # second stage list_objects_2 = function(ctx, con) { # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit06" #' As soon a table is removed from the database, #' it is also removed from the data frame of database objects. objects <- dbListObjects(con) quoted_tables <- map_chr(objects$table, dbQuoteIdentifier, conn = con) expect_false(dbQuoteIdentifier(con, table_name) %in% quoted_tables) }, #' list_objects_temporary = function(ctx, con, table_name) { #' The same applies to temporary objects if supported by the database. if (isTRUE(ctx$tweaks$temporary_tables) && isTRUE(ctx$tweaks$list_temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) objects <- dbListObjects(con) quoted_tables <- map_chr(objects$table, dbQuoteIdentifier, conn = con) expect_true(dbQuoteIdentifier(con, table_name) %in% quoted_tables) } }, #' list_objects_quote = function(ctx, con) { #' The returned names are suitable for quoting with `dbQuoteIdentifier()`. 
if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) dbWriteTable(con, dbQuoteIdentifier(con, table_name), data.frame(a = 2L)) objects <- dbListObjects(con) quoted_tables <- map_chr(objects$table, dbQuoteIdentifier, conn = con) expect_true(dbQuoteIdentifier(con, table_name) %in% quoted_tables) } }, #' list_objects_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when calling this method for a closed expect_error(dbListObjects(closed_con)) }, list_objects_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbListObjects(invalid_con)) }, list_objects_features = function(ctx, con) { #' @section Specification: objects <- dbListObjects(con) #' The `prefix` column indicates if the `table` value refers to a table #' or a prefix. #' For a call with the default `prefix = NULL`, the `table` #' values that have `is_prefix == FALSE` correspond to the tables #' returned from [dbListTables()], non_prefix_objects <- map_chr( objects$table[!objects$is_prefix], dbQuoteIdentifier, conn = con ) all_tables <- dbQuoteIdentifier(con, dbListTables(con)) expect_equal(sort(non_prefix_objects), sort(as.character(all_tables))) #' #' The `table` object can be quoted with [dbQuoteIdentifier()]. sql <- map(objects$table[!objects$is_prefix], dbQuoteIdentifier, conn = con) #' The result of quoting can be passed to [dbUnquoteIdentifier()]. expect_error(walk(sql, dbUnquoteIdentifier, conn = con), NA) #' (For backends it may be convenient to use the [Id] class, but this is #' not required.) if (!any(objects$is_prefix)) { skip("No schemas available") } #' #' Values in `table` column that have `is_prefix == TRUE` can be #' passed as the `prefix` argument to another call to `dbListObjects()`. 
#' For the data frame returned from a `dbListObject()` call with the #' `prefix` argument set, all `table` values where `is_prefix` is #' `FALSE` can be used in a call to [dbExistsTable()] which returns #' `TRUE`. for (schema in utils::head(objects$table[objects$is_prefix])) { sub_objects <- dbListObjects(con, prefix = schema) for (sub_table in utils::head(sub_objects$table[!sub_objects$is_prefix])) { # HACK HACK HACK for RMariaDB on OS X (#188) if (!identical(sub_table, Id(schema = "information_schema", table = "FILES"))) { # eval(bquote()) preserves the SQL class, even if it's not apparent # in the output eval(bquote(expect_true( dbExistsTable(con, .(sub_table)), label = paste0("dbExistsTable(", dbQuoteIdentifier(con, sub_table), ")") ))) } } } }, # NULL ) DBItest/R/spec-sql-append-table.R0000644000176200001440000004476114602061640016170 0ustar liggesusers#' spec_sql_append_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_append_table <- list( append_table_formals = function() { # expect_equal(names(formals(dbAppendTable)), c("conn", "name", "value", "...", "row.names")) }, append_table_return = function(con, table_name) { #' @return #' `dbAppendTable()` returns a test_in <- trivial_df() dbCreateTable(con, table_name, test_in) ret <- dbAppendTable(con, table_name, test_in) #' scalar expect_equal(length(ret), 1) #' numeric. expect_true(is.numeric(ret)) }, #' append_table_missing = function(con, table_name) { #' @section Failure modes: #' If the table does not exist, stopifnot(!dbExistsTable(con, table_name)) expect_error(dbAppendTable(con, table_name, data.frame(a = 2L))) }, append_table_invalid_value = function(con, table_name) { #' or the new data in `values` is not a data frame or has different column names, #' an error is raised; the remote table remains unchanged. 
test_in <- trivial_df() dbCreateTable(con, table_name, test_in) expect_error(dbAppendTable(con, table_name, unclass(test_in))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in[0, , drop = FALSE]) }, append_table_append_incompatible = function(con, table_name) { test_in <- trivial_df() dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) expect_error(dbAppendTable(con, table_name, data.frame(b = 2L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' append_table_closed_connection = function(ctx, closed_con) { #' An error is raised when calling this method for a closed expect_error(dbAppendTable(closed_con, "test", data.frame(a = 1))) }, append_table_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbAppendTable(invalid_con, "test", data.frame(a = 1))) }, append_table_error = function(con, table_name) { #' An error is also raised test_in <- data.frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbAppendTable(con, NA, test_in)) #' if this results in a non-scalar. expect_error(dbAppendTable(con, c("test", "test"), test_in)) #' Invalid values for the `row.names` argument #' (non-scalars, expect_error(dbAppendTable(con, "test", test_in, row.names = letters)) #' unsupported data types, expect_error(dbAppendTable(con, "test", test_in, row.names = list(1L))) #' `NA`) expect_error(dbAppendTable(con, "test", test_in, row.names = NA)) #' also raise an error. }, #' append_roundtrip_keywords = function(con) { #' @section Specification: #' SQL keywords can be used freely in table names, column names, and data. 
tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in, name = "exists") }, append_roundtrip_quotes = function(ctx, con, table_name) { #' Quotes, commas, spaces, and other special characters such as newlines and tabs, #' can also be used in the data, tbl_in <- data.frame( as.character(dbQuoteString(con, "")), as.character(dbQuoteIdentifier(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb", stringsAsFactors = FALSE ) names(tbl_in) <- letters[seq_along(tbl_in)] test_table_roundtrip(con, tbl_in, use_append = TRUE) }, append_roundtrip_quotes_table_names = function(ctx, con) { #' and, if the database supports non-syntactic identifiers, #' also for table names if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb" ) tbl_in <- trivial_df() for (table_name in table_names) { test_table_roundtrip_one(con, tbl_in, use_append = TRUE, .add_na = FALSE) } }, append_roundtrip_quotes_column_names = function(ctx, con) { #' and column names. 
if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } column_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\nb", "a\\tb", "a\\rb", "a\\bb", "a\\zb" ) tbl_in <- trivial_df(length(column_names), column_names) test_table_roundtrip_one(con, tbl_in, use_append = TRUE, .add_na = FALSE) }, #' append_roundtrip_integer = function(con) { #' The following data types must be supported at least, #' and be read identically with [dbReadTable()]: #' - integer tbl_in <- data.frame(a = c(1:5)) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, append_roundtrip_numeric = function(con) { #' - numeric tbl_in <- data.frame(a = c(seq(1, 3, by = 0.5))) test_table_roundtrip(use_append = TRUE, con, tbl_in) #' (the behavior for `Inf` and `NaN` is not specified) }, append_roundtrip_logical = function(ctx, con) { #' - logical tbl_in <- data.frame(a = c(TRUE, FALSE, NA)) tbl_exp <- tbl_in tbl_exp$a <- ctx$tweaks$logical_return(tbl_exp$a) test_table_roundtrip(use_append = TRUE, con, tbl_in, tbl_exp) }, append_roundtrip_null = function(con) { #' - `NA` as NULL tbl_in <- data.frame(a = NA) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { tbl_out$a <- as.logical(tbl_out$a) # Plain NA is of type logical tbl_out } ) }, #' - 64-bit values (using `"bigint"` as field type); the result can be append_roundtrip_64_bit_numeric = function(ctx, con) { tbl_in <- data.frame(a = c(-1e14, 1e15)) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { #' - converted to a numeric, which may lose precision, tbl_out$a <- as.numeric(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # append_roundtrip_64_bit_character = function(ctx, con) { tbl_in <- data.frame(a = c(-1e14, 1e15)) tbl_exp <- tbl_in tbl_exp$a <- format(tbl_exp$a, scientific = FALSE) test_table_roundtrip( use_append = TRUE, con, tbl_in, tbl_exp, 
transform = function(tbl_out) { #' - converted a character vector, which gives the full decimal #' representation tbl_out$a <- as.character(tbl_out$a) tbl_out }, field.types = c(a = "BIGINT") ) }, # append_roundtrip_64_bit_roundtrip = function(con, table_name) { tbl_in <- data.frame(a = c(-1e14, 1e15)) dbWriteTable(con, table_name, tbl_in, field.types = c(a = "BIGINT")) tbl_out <- dbReadTable(con, table_name) #' - written to another table and read again unchanged test_table_roundtrip(use_append = TRUE, con, tbl_out, tbl_expected = tbl_out) }, append_roundtrip_character = function(con) { #' - character (in both UTF-8 tbl_in <- data.frame( id = seq_along(get_texts()), a = get_texts(), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, append_roundtrip_character_native = function(con) { #' and native encodings), tbl_in <- data.frame( a = c(enc2native(get_texts())), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, append_roundtrip_character_empty = function(con) { #' supporting empty strings tbl_in <- data.frame( a = c("", "a"), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, append_roundtrip_character_empty_after = function(con) { #' (before and after non-empty strings) tbl_in <- data.frame( a = c("a", ""), stringsAsFactors = FALSE ) test_table_roundtrip(use_append = TRUE, con, tbl_in) }, append_roundtrip_factor = function(con) { #' - factor (returned as character, tbl_in <- data.frame( a = factor(get_texts()) ) tbl_exp <- tbl_in tbl_exp$a <- as.character(tbl_exp$a) #' with a warning) suppressWarnings( expect_warning( test_table_roundtrip(use_append = TRUE, con, tbl_in, tbl_exp) ) ) }, append_roundtrip_raw = function(ctx, con) { #' - list of raw #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = I(list(as.raw(0:10)))) tbl_exp <- tbl_in tbl_exp$a <- 
blob::as_blob(unclass(tbl_in$a)) test_table_roundtrip( use_append = TRUE, con, tbl_in, tbl_exp, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, append_roundtrip_blob = function(ctx, con) { #' - objects of type [blob::blob] #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = blob::blob(as.raw(0:10))) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, append_roundtrip_date = function(ctx, con) { #' - date #' (if supported by the database; if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } #' returned as `Date`) tbl_in <- data.frame(a = as_numeric_date(c(Sys.Date() + 1:5))) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, append_roundtrip_date_extended = function(ctx, con) { #' also for dates prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } tbl_in <- data.frame(a = as_numeric_date(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" ))) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, append_roundtrip_time = function(ctx, con) { #' - time #' (if supported by the database; if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } tbl_in <- data.frame(a = hms::hms(minutes = 1:5)) tbl_in$b <- .difftime(as.numeric(tbl_in$a) / 60, "mins") tbl_exp <- tbl_in tbl_exp$a <- hms::as_hms(tbl_exp$a) tbl_exp$b <- hms::as_hms(tbl_exp$b) test_table_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' returned as objects that inherit from `difftime`) expect_s3_class(tbl_out$a, "difftime") 
expect_s3_class(tbl_out$b, "difftime") tbl_out$a <- hms::as_hms(tbl_out$a) tbl_out$b <- hms::as_hms(tbl_out$b) tbl_out } ) }, append_roundtrip_timestamp = function(ctx, con) { #' - timestamp #' (if supported by the database; if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } #' returned as `POSIXct` local <- round(Sys.time()) + c( 1, 60, 3600, 86400, 86400 * 90, 86400 * 180, 86400 * 270, 1e9, 5e9 ) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone), test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, append_roundtrip_timestamp_extended = function(ctx, con) { #' also for timestamps prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } local <- as.POSIXct(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" )) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone) test_table_roundtrip( use_append = TRUE, con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' 
append_roundtrip_mixed = function(con) { #' Mixing column types in the same table is supported. data <- list("a", 1L, 1.5) data <- map(data, c, NA) expanded <- expand.grid(a = data, b = data, c = data) tbl_in_list <- map( seq_len(nrow(expanded)), function(i) { as.data.frame(map(expanded[i, ], unlist, recursive = FALSE)) } ) map(tbl_in_list, test_table_roundtrip, con = con) }, append_table_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbAppendTable()` will do the quoting, dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, append_table_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) dbCreateTable(con, dbQuoteIdentifier(con, table_name), test_in) dbAppendTable(con, dbQuoteIdentifier(con, table_name), test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) } }, #' append_table_row_names_false = function(con, table_name) { #' #' The `row.names` argument must be `NULL`, the default value. 
mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) dbAppendTable(con, table_name, mtcars_in) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) }, append_table_row_names_ignore = function(con, table_name) { #' Row names are ignored. mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) dbAppendTable(con, table_name, mtcars_in, row.names = NULL) mtcars_out <- check_df(dbReadTable(con, table_name, row.names = FALSE)) expect_false("row_names" %in% names(mtcars_out)) expect_equal_df(mtcars_out, unrowname(mtcars_in)) }, # #' append_table_row_names_non_null = function(con, table_name) { #' @section Failure modes: #' Passing a `value` argument different to `NULL` to the `row.names` argument mtcars_in <- datasets::mtcars dbCreateTable(con, table_name, mtcars_in) #' (in particular `TRUE`, expect_error(dbAppendTable(con, table_name, mtcars_in, row.names = TRUE)) #' `NA`, expect_error(dbAppendTable(con, table_name, mtcars_in, row.names = NA)) #' and a string) expect_error(dbAppendTable(con, table_name, mtcars_in, row.names = "make_model")) #' raises an error. }, #' append_table_value_df = function(con, table_name) { #' @section Specification: #' The `value` argument must be a data frame test_in <- trivial_df() dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, append_table_value_subset = function(ctx, con, table_name) { #' with a subset of the columns of the existing table. 
test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in[2]) test_out <- check_df(dbReadTable(con, table_name)) test_in[c(1, 3)] <- NA_real_ expect_equal_df(test_out, test_in) }, append_table_value_shuffle = function(ctx, con, table_name) { #' The order of the columns does not matter. test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in[c(2, 3, 1)]) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, # append_table_value_shuffle_subset = function(ctx, con, table_name) { test_in <- trivial_df(4, letters[1:4]) dbCreateTable(con, table_name, test_in) dbAppendTable(con, table_name, test_in[c(4, 1, 3)]) test_out <- check_df(dbReadTable(con, table_name)) test_in[2] <- NA_real_ expect_equal_df(test_out, test_in) }, # NULL ) DBItest/R/spec-result-get-query.R0000644000176200001440000001745314602020561016270 0ustar liggesusers#' spec_result_get_query #' @family result specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_result_get_query <- list( get_query_formals = function() { # expect_equal(names(formals(dbGetQuery)), c("conn", "statement", "...")) }, get_query_atomic = function(con) { #' @return #' `dbGetQuery()` always returns a [data.frame], with #' as many rows as records were fetched and as many #' columns as fields in the result set, #' even if the result is a single value query <- trivial_query() rows <- check_df(dbGetQuery(con, query)) expect_equal(rows, data.frame(a = 1.5)) }, get_query_one_row = function(con) { #' or has one query <- trivial_query(3, letters[1:3]) result <- trivial_df(3, letters[1:3]) rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, result) }, get_query_zero_rows = function(con) { #' or zero rows. # Not all SQL dialects seem to support the query used here. 
query <- "SELECT * FROM (SELECT 1 as a, 2 as b, 3 as c) AS x WHERE (1 = 0)" rows <- check_df(dbGetQuery(con, query)) expect_identical(names(rows), letters[1:3]) expect_identical(dim(rows), c(0L, 3L)) }, #' get_query_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when issuing a query over a closed expect_error(dbGetQuery(closed_con, trivial_query())) }, get_query_invalid_connection = function(ctx, invalid_con) { #' or invalid connection, expect_error(dbGetQuery(invalid_con, trivial_query())) }, get_query_syntax_error = function(con) { #' if the syntax of the query is invalid, expect_error(dbGetQuery(con, "SELLECT")) }, get_query_non_string = function(con) { #' or if the query is not a non-`NA` string. expect_error(dbGetQuery(con, character())) expect_error(dbGetQuery(con, letters)) expect_error(dbGetQuery(con, NA_character_)) }, get_query_n_bad = function(con) { #' If the `n` argument is not an atomic whole number #' greater or equal to -1 or Inf, an error is raised, query <- trivial_query() expect_error(dbGetQuery(con, query, n = -2)) expect_error(dbGetQuery(con, query, n = 1.5)) expect_error(dbGetQuery(con, query, n = integer())) expect_error(dbGetQuery(con, query, n = 1:3)) }, get_query_good_after_bad_n = function(con) { #' but a subsequent call to `dbGetQuery()` with proper `n` argument succeeds. query <- trivial_query() expect_error(dbGetQuery(con, query, n = -2)) rows <- check_df(dbGetQuery(con, query)) expect_equal(rows, data.frame(a = 1.5)) }, #' @section Additional arguments: #' The following arguments are not part of the `dbGetQuery()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `n` (default: -1) #' - `params` (default: `NULL`) #' - `immediate` (default: `NULL`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
get_query_row_names = function(con) { #' @section Specification: #' #' A column named `row_names` is treated like any other column. query <- trivial_query(column = "row_names") result <- trivial_df(column = "row_names") rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, result) expect_identical(.row_names_info(rows), -1L) }, #' get_query_multi_row_single_column = function(ctx, con) { #' The `n` argument specifies the number of rows to be fetched. #' If omitted, fetching multi-row queries with one query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, result) }, get_query_multi_row_multi_column = function(ctx, con) { #' or more columns returns the entire result. query <- sql_union( .ctx = ctx, paste("SELECT", 1:5 + 0.5, "AS a,", 4:0 + 0.5, "AS b"), .order_by = "a" ) rows <- check_df(dbGetQuery(con, query)) expect_identical(rows, data.frame(a = 1:5 + 0.5, b = 4:0 + 0.5)) }, get_query_n_multi_row_inf = function(ctx, con) { #' A value of [Inf] for the `n` argument is supported #' and also returns the full result. query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) rows <- check_df(dbGetQuery(con, query, n = Inf)) expect_identical(rows, result) }, get_query_n_more_rows = function(ctx, con) { #' If more rows than available are fetched (by passing a too large value for #' `n`), the result is returned in full without warning. query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(3) rows <- check_df(dbGetQuery(con, query, n = 5L)) expect_identical(rows, result) }, get_query_n_zero_rows = function(ctx, con) { #' If zero rows are requested, the columns of the data frame are still fully #' typed. 
query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(0) rows <- check_df(dbGetQuery(con, query, n = 0L)) expect_identical(rows, result) }, get_query_n_incomplete = function(ctx, con) { #' Fetching fewer rows than available is permitted, #' no warning is issued. query <- trivial_query(3, .ctx = ctx, .order_by = "a") result <- trivial_df(2) rows <- check_df(dbGetQuery(con, query, n = 2L)) expect_identical(rows, result) }, #' get_query_params = function(ctx, con) { #' The `param` argument allows passing query parameters, see [dbBind()] for details. placeholder_funs <- get_placeholder_funs(ctx) for (placeholder_fun in placeholder_funs) { placeholder <- placeholder_fun(1) query <- paste0("SELECT ", placeholder, " + 1.0 AS a") values <- trivial_values(3) - 1 params <- stats::setNames(list(values), names(placeholder)) ret <- dbGetQuery(con, query, params = params) expect_equal(ret, trivial_df(3), info = placeholder) } }, get_query_immediate = function(con, table_name) { #' @section Specification for the `immediate` argument: #' #' The `immediate` argument supports distinguishing between "direct" #' and "prepared" APIs offered by many database drivers. #' Passing `immediate = TRUE` leads to immediate execution of the #' query or statement, via the "direct" API (if supported by the driver). #' The default `NULL` means that the backend should choose whatever API #' makes the most sense for the database, and (if relevant) tries the #' other API if the first attempt fails. A successful second attempt #' should result in a message that suggests passing the correct #' `immediate` argument. #' Examples for possible behaviors: #' 1. DBI backend defaults to `immediate = TRUE` internally #' 1. A query without parameters is passed: query is executed #' 1. A query with parameters is passed: #' 1. `params` not given: rejected immediately by the database #' because of a syntax error in the query, the backend tries #' `immediate = FALSE` (and gives a message) #' 1. 
`params` given: query is executed using `immediate = FALSE` #' 1. DBI backend defaults to `immediate = FALSE` internally #' 1. A query without parameters is passed: #' 1. simple query: query is executed #' 1. "special" query (such as setting a config options): fails, #' the backend tries `immediate = TRUE` (and gives a message) #' 1. A query with parameters is passed: #' 1. `params` not given: waiting for parameters via [dbBind()] #' 1. `params` given: query is executed res <- expect_visible(dbGetQuery(con, trivial_query(), immediate = TRUE)) check_df(res) }, # NULL ) DBItest/R/spec-arrow-roundtrip.R0000644000176200001440000000011714602017371016202 0ustar liggesusers# FIXME: Adapt tests from spec_result_roundtrip spec_arrow_roundtrip <- list() DBItest/R/spec-arrow-write-table-arrow.R0000644000176200001440000006121714602110420017520 0ustar liggesusers#' spec_arrow_write_table_arrow #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @importFrom lubridate with_tz spec_arrow_write_table_arrow <- list( arrow_write_table_arrow_formals = function() { # expect_equal(names(formals(dbWriteTableArrow)), c("conn", "name", "value", "...")) }, arrow_write_table_arrow_return = function(con, table_name) { #' @return #' `dbWriteTableArrow()` returns `TRUE`, invisibly. 
expect_invisible_true(dbWriteTableArrow(con, table_name, stream_frame(a = 1L))) }, #' arrow_write_table_arrow_error_overwrite = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.39") #' @section Failure modes: #' If the table exists, and both `append` and `overwrite` arguments are unset, test_in <- data.frame(a = 1L) dbWriteTableArrow(con, table_name, test_in %>% stream_frame()) expect_error(dbWriteTableArrow(con, table_name, stream_frame(a = 2L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, arrow_write_table_arrow_append_incompatible = function(con, table_name) { #' or `append = TRUE` and the data frame with the new data has different #' column names, #' an error is raised; the remote table remains unchanged. test_in <- data.frame(a = 1L) dbWriteTableArrow(con, table_name, test_in %>% stream_frame()) expect_error(dbWriteTableArrow(con, table_name, stream_frame(b = 2L), append = TRUE)) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' arrow_write_table_arrow_closed_connection = function(ctx, closed_con) { #' An error is raised when calling this method for a closed expect_error(dbWriteTableArrow(closed_con, "test", stream_frame(a = 1))) }, arrow_write_table_arrow_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbWriteTableArrow(invalid_con, "test", stream_frame(a = 1))) }, arrow_write_table_arrow_error = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.38") #' An error is also raised test_in <- data.frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbWriteTableArrow(con, NA, test_in %>% stream_frame())) #' if this results in a non-scalar. 
expect_error(dbWriteTableArrow(con, c(table_name, table_name), test_in %>% stream_frame())) #' Invalid values for the additional arguments #' `overwrite`, `append`, and `temporary` #' (non-scalars, expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), overwrite = c(TRUE, FALSE))) expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), append = c(TRUE, FALSE))) expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), temporary = c(TRUE, FALSE))) #' unsupported data types, expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), overwrite = 1L)) expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), append = 1L)) expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), temporary = 1L)) #' `NA`, expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), overwrite = NA)) expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), append = NA)) expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), temporary = NA)) #' incompatible values, expect_error(dbWriteTableArrow(con, table_name, test_in %>% stream_frame(), overwrite = TRUE, append = TRUE)) #' incompatible columns) dbWriteTableArrow(con, table_name, test_in %>% stream_frame()) expect_error(dbWriteTableArrow(con, table_name, stream_frame(b = 2L, c = 3L), append = TRUE)) #' also raise an error. }, #' @section Additional arguments: #' The following arguments are not part of the `dbWriteTableArrow()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `overwrite` (default: `FALSE`) #' - `append` (default: `FALSE`) #' - `temporary` (default: `FALSE`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
arrow_write_table_arrow_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- data.frame(a = 1) local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbWriteTableArrow()` will do the quoting, dbWriteTableArrow(con, table_name, test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, arrow_write_table_arrow_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- data.frame(a = 1) local_remove_test_table(con, table_name) dbWriteTableArrow(con, dbQuoteIdentifier(con, table_name), test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) } }, #' arrow_write_table_arrow_value_df = function(con, table_name) { #' The `value` argument must be a data frame test_in <- trivial_df() dbWriteTableArrow(con, table_name, test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, arrow_write_table_arrow_value_subset = function(ctx, con, table_name) { #' with a subset of the columns of the existing table if `append = TRUE`. 
test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbWriteTableArrow(con, table_name, test_in[2] %>% stream_frame(), append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) test_in[c(1, 3)] <- NA_real_ expect_equal_df(test_out, test_in) }, arrow_write_table_arrow_value_shuffle = function(ctx, con, table_name) { #' The order of the columns does not matter with `append = TRUE`. test_in <- trivial_df(3, letters[1:3]) dbCreateTable(con, table_name, test_in) dbWriteTableArrow(con, table_name, test_in[c(2, 3, 1)] %>% stream_frame(), append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, # arrow_write_table_arrow_value_shuffle_subset = function(ctx, con, table_name) { test_in <- trivial_df(4, letters[1:4]) dbCreateTable(con, table_name, test_in) dbWriteTableArrow(con, table_name, test_in[c(4, 1, 3)] %>% stream_frame(), append = TRUE) test_out <- check_df(dbReadTable(con, table_name)) test_in[2] <- NA_real_ expect_equal_df(test_out, test_in) }, #' arrow_write_table_arrow_overwrite = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.37") #' If the `overwrite` argument is `TRUE`, an existing table of the same name #' will be overwritten. penguins <- get_penguins(ctx) dbWriteTableArrow(con, table_name, penguins) expect_error( dbWriteTableArrow(con, table_name, penguins[1, ] %>% stream_frame(), overwrite = TRUE), NA ) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[1, ]) }, arrow_write_table_arrow_overwrite_missing = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.36") #' This argument doesn't change behavior if the table does not exist yet. 
penguins_in <- get_penguins(ctx) expect_error( dbWriteTableArrow(con, table_name, penguins_in[1, ] %>% stream_frame(), overwrite = TRUE), NA ) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins_in[1, ]) }, #' arrow_write_table_arrow_append = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.35") #' If the `append` argument is `TRUE`, the rows in an existing table are #' preserved, and the new data are appended. penguins <- get_penguins(ctx) dbWriteTableArrow(con, table_name, penguins) expect_error(dbWriteTableArrow(con, table_name, penguins[1, ] %>% stream_frame(), append = TRUE), NA) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, rbind(penguins, penguins[1, ])) }, arrow_write_table_arrow_append_new = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.34") #' If the table doesn't exist yet, it is created. penguins <- get_penguins(ctx) expect_error(dbWriteTableArrow(con, table_name, penguins[1, ] %>% stream_frame(), append = TRUE), NA) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[1, ]) }, #' arrow_write_table_arrow_temporary_1 = function(ctx, con, table_name = "dbit08") { skip_if_not_dbitest(ctx, "1.8.0.33") #' If the `temporary` argument is `TRUE`, the table is not available in a #' second connection and is gone after reconnecting. #' Not all backends support this argument. 
if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } penguins <- get_penguins(ctx) dbWriteTableArrow(con, table_name, penguins %>% stream_frame(), temporary = TRUE) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins) con2 <- local_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, # second stage arrow_write_table_arrow_temporary_2 = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.33") if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit08" expect_error(dbReadTable(con, table_name)) }, arrow_write_table_arrow_visible_in_other_connection_1 = function(ctx, local_con) { skip_if_not_dbitest(ctx, "1.8.0.31") #' A regular, non-temporary table is visible in a second connection, penguins30 <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit09" dbWriteTableArrow(local_con, table_name, penguins30 %>% stream_frame()) penguins_out <- check_df(dbReadTable(local_con, table_name)) expect_equal_df(penguins_out, penguins30) con2 <- local_connection(ctx) expect_equal_df(dbReadTable(con2, table_name), penguins30) }, # second stage arrow_write_table_arrow_visible_in_other_connection_2 = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.31") #' in a pre-existing connection, penguins30 <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit09" expect_equal_df(check_df(dbReadTable(con, table_name)), penguins30) }, # third stage arrow_write_table_arrow_visible_in_other_connection_3 = function(ctx, local_con, table_name = "dbit09") { skip_if_not_dbitest(ctx, "1.8.0.31") #' and after reconnecting to the database. 
penguins30 <- get_penguins(ctx) expect_equal_df(check_df(dbReadTable(local_con, table_name)), penguins30) }, #' arrow_write_table_arrow_roundtrip_keywords = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.28") #' SQL keywords can be used freely in table names, column names, and data. tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_arrow_roundtrip(con, tbl_in, name = "exists") }, arrow_write_table_arrow_roundtrip_quotes = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.27") #' Quotes, commas, spaces, and other special characters such as newlines and tabs, #' can also be used in the data, tbl_in <- data.frame( as.character(dbQuoteString(con, "")), as.character(dbQuoteIdentifier(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb", stringsAsFactors = FALSE ) names(tbl_in) <- letters[seq_along(tbl_in)] test_arrow_roundtrip(con, tbl_in) }, arrow_write_table_arrow_roundtrip_quotes_table_names = function(ctx, con) { #' and, if the database supports non-syntactic identifiers, #' also for table names if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\Nb", "a\\tb", "a\\rb", "a\\bb", "a\\Zb" ) tbl_in <- trivial_df() for (table_name in table_names) { test_arrow_roundtrip_one(con, tbl_in, .add_na = "none") } }, arrow_write_table_arrow_roundtrip_quotes_column_names = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.26") #' and column names. 
skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } column_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "a,b", "a\nb", "a\tb", "a\rb", "a\bb", "a\\nb", "a\\tb", "a\\rb", "a\\bb", "a\\zb" ) tbl_in <- trivial_df(length(column_names), column_names) test_arrow_roundtrip_one(con, tbl_in, .add_na = "none") }, #' arrow_write_table_arrow_roundtrip_integer = function(ctx, con) { #' The following data types must be supported at least, #' and be read identically with [dbReadTable()]: #' - integer tbl_in <- data.frame(a = c(1:5)) test_arrow_roundtrip(con, tbl_in) }, arrow_write_table_arrow_roundtrip_numeric = function(ctx, con) { #' - numeric tbl_in <- data.frame(a = c(seq(1, 3, by = 0.5))) test_arrow_roundtrip(con, tbl_in) #' (the behavior for `Inf` and `NaN` is not specified) }, arrow_write_table_arrow_roundtrip_logical = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.25") #' - logical tbl_in <- data.frame(a = c(TRUE, FALSE, NA)) tbl_exp <- tbl_in tbl_exp$a <- ctx$tweaks$logical_return(tbl_exp$a) test_arrow_roundtrip(con, tbl_in, tbl_exp) }, arrow_write_table_arrow_roundtrip_null = function(ctx, con) { #' - `NA` as NULL tbl_in <- data.frame(a = NA) test_arrow_roundtrip( con, tbl_in, transform = function(tbl_out) { tbl_out$a <- as.logical(tbl_out$a) # Plain NA is of type logical tbl_out } ) }, #' - 64-bit values (using `"bigint"` as field type); the result can be arrow_write_table_arrow_roundtrip_64_bit_numeric = function(ctx, con) { skip("Internal: Need to enhance test_arrow_roundtrip()") tbl_in <- data.frame(a = c(-1e14, 1e15)) test_arrow_roundtrip( con, tbl_in, transform = function(tbl_out) { #' - converted to a numeric, which may lose precision, tbl_out$a <- as.numeric(tbl_out$a) tbl_out } ) }, # arrow_write_table_arrow_roundtrip_64_bit_character = function(ctx, con) { skip("Internal: Need to enhance test_arrow_roundtrip()") tbl_in <- 
data.frame(a = c(-1e14, 1e15)) tbl_exp <- tbl_in tbl_exp$a <- format(tbl_exp$a, scientific = FALSE) test_arrow_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' - converted a character vector, which gives the full decimal #' representation tbl_out$a <- as.character(tbl_out$a) tbl_out } ) }, # arrow_write_table_arrow_roundtrip_64_bit_roundtrip = function(ctx, con, table_name) { skip("Internal: Need to enhance test_arrow_roundtrip()") tbl_in <- data.frame(a = c(-1e14, 1e15)) dbWriteTableArrow(con, table_name, tbl_in, field.types = c(a = "BIGINT")) tbl_out <- dbReadTable(con, table_name) #' - written to another table and read again unchanged test_arrow_roundtrip(con, tbl_out, tbl_expected = tbl_out) }, arrow_write_table_arrow_roundtrip_character = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.22") #' - character (in both UTF-8 tbl_in <- data.frame( id = seq_along(get_texts()), a = get_texts(), stringsAsFactors = FALSE ) test_arrow_roundtrip(con, tbl_in) }, arrow_write_table_arrow_roundtrip_character_native = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.21") #' and native encodings), tbl_in <- data.frame( a = c(enc2native(get_texts())), stringsAsFactors = FALSE ) test_arrow_roundtrip(con, tbl_in) }, arrow_write_table_arrow_roundtrip_character_empty = function(ctx, con) { #' supporting empty strings tbl_in <- data.frame( a = c("", "a"), stringsAsFactors = FALSE ) test_arrow_roundtrip(con, tbl_in) }, arrow_write_table_arrow_roundtrip_character_empty_after = function(ctx, con) { #' before and after a non-empty string tbl_in <- data.frame( a = c("a", ""), stringsAsFactors = FALSE ) test_arrow_roundtrip(con, tbl_in) }, arrow_write_table_arrow_roundtrip_factor = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.20") #' - factor (possibly returned as character) tbl_in <- data.frame( a = factor(get_texts()) ) tbl_exp <- tbl_in tbl_exp$a <- as.character(tbl_exp$a) test_arrow_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { 
tbl_out$a <- as.character(tbl_out$a) tbl_out } ) }, arrow_write_table_arrow_roundtrip_blob = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.18") #' - objects of type [blob::blob] #' (if supported by the database) if (isTRUE(ctx$tweaks$omit_blob_tests)) { skip("tweak: omit_blob_tests") } tbl_in <- data.frame(id = 1L, a = blob::blob(as.raw(0:10))) test_arrow_roundtrip( con, tbl_in, transform = function(tbl_out) { tbl_out$a <- blob::as_blob(tbl_out$a) tbl_out } ) }, arrow_write_table_arrow_roundtrip_date = function(ctx, con) { #' - date #' (if supported by the database; if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } #' returned as `Date`), tbl_in <- data.frame(a = as_numeric_date(c(Sys.Date() + 1:5))) test_arrow_roundtrip( con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, arrow_write_table_arrow_roundtrip_date_extended = function(ctx, con) { #' also for dates prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$date_typed)) { skip("tweak: !date_typed") } tbl_in <- data.frame(a = as_numeric_date(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" ))) test_arrow_roundtrip( con, tbl_in, transform = function(tbl_out) { expect_type(unclass(tbl_out$a), "double") tbl_out } ) }, arrow_write_table_arrow_roundtrip_time = function(ctx, con) { #' - time #' (if supported by the database; if (!isTRUE(ctx$tweaks$time_typed)) { skip("tweak: !time_typed") } tbl_in <- data.frame(a = hms::hms(minutes = 1:5)) tbl_in$b <- .difftime(as.numeric(tbl_in$a) / 60, "mins") tbl_exp <- tbl_in tbl_exp$a <- hms::as_hms(tbl_exp$a) tbl_exp$b <- hms::as_hms(tbl_exp$b) test_arrow_roundtrip( con, tbl_in, tbl_exp, transform = function(tbl_out) { #' returned as objects that inherit from `difftime`) expect_s3_class(tbl_out$a, "difftime") expect_s3_class(tbl_out$b, "difftime") tbl_out$a <- hms::as_hms(tbl_out$a) tbl_out$b <- 
hms::as_hms(tbl_out$b) tbl_out } ) }, arrow_write_table_arrow_roundtrip_timestamp = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.17") #' - timestamp #' (if supported by the database; if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } #' returned as `POSIXct` local <- round(Sys.time()) + c( 1, 60, 3600, 86400, 86400 * 90, 86400 * 180, 86400 * 270, 1e9, 5e9 ) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone), test_arrow_roundtrip( con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, arrow_write_table_arrow_roundtrip_timestamp_extended = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.16") #' also for timestamps prior to 1970 or 1900 or after 2038 if (!isTRUE(ctx$tweaks$timestamp_typed)) { skip("tweak: !timestamp_typed") } local <- as.POSIXct(c( "1811-11-11", "1899-12-31", "1900-01-01", "1950-05-05", "1969-12-31", "1970-01-01", "2037-01-01", "2038-01-01", "2040-01-01", "2999-09-09" )) attr(local, "tzone") <- "" tbl_in <- data.frame(id = seq_along(local)) tbl_in$local <- local tbl_in$gmt <- lubridate::with_tz(local, tzone = "GMT") tbl_in$pst8pdt <- lubridate::with_tz(local, tzone = "PST8PDT") tbl_in$utc <- lubridate::with_tz(local, tzone = "UTC") #' respecting the time zone but not necessarily preserving the #' input time zone) test_arrow_roundtrip( con, tbl_in, transform = function(out) { dates <- map_lgl(out, inherits, "POSIXt") tz <- toupper(names(out)) tz[tz == "LOCAL"] <- "" out[dates] <- Map(lubridate::with_tz, out[dates], tz[dates]) out } ) }, #' 
arrow_write_table_arrow_roundtrip_mixed = function(ctx, con) { #' Mixing column types in the same table is supported. data <- list("a", 1L, 1.5) data <- map(data, c, NA) expanded <- expand.grid(a = data, b = data, c = data) tbl_in_list <- map( seq_len(nrow(expanded)), function(i) { as.data.frame(map(expanded[i, ], unlist, recursive = FALSE)) } ) map(tbl_in_list, test_arrow_roundtrip, con = con) }, # NULL ) test_arrow_roundtrip <- function(...) { test_arrow_roundtrip_one(..., .add_na = "none") test_arrow_roundtrip_one(..., .add_na = "above") test_arrow_roundtrip_one(..., .add_na = "below") } test_arrow_roundtrip_one <- function(con, tbl_in, tbl_expected = tbl_in, transform = identity, name = NULL, use_append = FALSE, .add_na = "none") { # Need data frames here because streams can be collected only once stopifnot(is.data.frame(tbl_in)) stopifnot(is.data.frame(tbl_expected)) force(tbl_expected) if (.add_na == "above") { tbl_in <- stream_add_na_above(tbl_in) tbl_expected <- stream_add_na_above(tbl_expected) } else if (.add_na == "below") { tbl_in <- stream_add_na_below(tbl_in) tbl_expected <- stream_add_na_below(tbl_expected) } if (is.null(name)) { name <- random_table_name() } local_remove_test_table(con, name = name) if (use_append) { dbCreateTableArrow(con, name, tbl_in %>% stream_frame()) dbAppendTableArrow(con, name, tbl_in %>% stream_frame()) } else { dbWriteTableArrow(con, name, tbl_in %>% stream_frame()) } stream <- dbReadTableArrow(con, name) tbl_out <- check_arrow(stream, transform) expect_equal_df(tbl_out, tbl_expected) } stream_add_na_above <- function(tbl) { idx <- c(NA, seq_len(nrow(tbl))) tbl <- tbl[idx, , drop = FALSE] unrowname(tbl) } stream_add_na_below <- function(tbl) { idx <- c(seq_len(nrow(tbl)), NA) tbl <- tbl[idx, , drop = FALSE] unrowname(tbl) } DBItest/R/spec-meta-bind-expr.R0000644000176200001440000003204514725004207015645 0ustar liggesusers#' spec_meta_bind #' @name spec_meta_bind #' @aliases NULL #' @family meta specifications #' @usage 
NULL #' @format NULL #' @keywords NULL spec_meta_bind_expr <- function( arrow = c("none", "query"), bind = c("df", "stream"), ..., ctx = stop("ctx is available during run time only")) { check_dots_empty() arrow <- arg_match(arrow) bind <- arg_match(bind) out <- list( bind_return_value = function() { #' @return check_return_value <- function(bind_res, res) { #' `dbBind()` returns the result set, expect_identical(res, bind_res$value) #' invisibly, expect_false(bind_res$visible) } #' for queries issued by [dbSendQuery()] or [dbSendQueryArrow()] and test_select_bind_expr( arrow = arrow, bind = bind, 1L, check_return_value = check_return_value ) }, # bind_return_value_statement = if (arrow != "query") function() { check_return_value <- function(bind_res, res) { expect_identical(res, bind_res$value) expect_false(bind_res$visible) } #' also for data manipulation statements issued by #' [dbSendStatement()]. test_select_bind_expr( arrow = arrow, bind = bind, 1L, check_return_value = check_return_value, query = FALSE ) }, # bind_too_many = function() { #' @section Failure modes: patch_bind_values <- function(bind_values) { #' Binding too many if (is.null(names(bind_values))) { c(bind_values, bind_values[[1L]]) } else { c(bind_values, bogus = bind_values[[1L]]) } } test_select_bind_expr( arrow = arrow, bind = bind, 1L, patch_bind_values = patch_bind_values, bind_error = ".*" ) }, # bind_not_enough = function() { patch_bind_values <- function(bind_values) { #' or not enough values, bind_values[-1L] } test_select_bind_expr( arrow = arrow, bind = bind, 1:2, patch_bind_values = patch_bind_values, bind_error = ".*" ) }, # bind_wrong_name = function() { patch_bind_values <- function(bind_values) { #' or parameters with wrong names stats::setNames(bind_values, paste0("bogus", names(bind_values))) } test_select_bind_expr( arrow = arrow, bind = bind, 1L, patch_bind_values = patch_bind_values, bind_error = ".*", requires_names = TRUE ) }, # bind_multi_row_unequal_length = if (bind == 
"df") function() { patch_bind_values <- function(bind_values) { #' or unequal length, bind_values[[2]] <- bind_values[[2]][-1] bind_values } #' also raises an error. test_select_bind_expr( arrow = arrow, bind = bind, list(1:3, 2:4), patch_bind_values = patch_bind_values, bind_error = ".*", query = FALSE ) }, # bind_named_param_unnamed_placeholders = function() { #' If the placeholders in the query are named, patch_bind_values <- function(bind_values) { #' all parameter values must have names stats::setNames(bind_values, NULL) } test_select_bind_expr( arrow = arrow, bind = bind, 1L, patch_bind_values = patch_bind_values, bind_error = ".*", requires_names = TRUE ) }, # bind_named_param_empty_placeholders = function() { patch_bind_values <- function(bind_values) { #' (which must not be empty names(bind_values)[[1]] <- "" bind_values } test_select_bind_expr( arrow = arrow, bind = bind, list(1L, 2L), patch_bind_values = patch_bind_values, bind_error = ".*", requires_names = TRUE ) }, # bind_named_param_na_placeholders = if (arrow == "none") function() { patch_bind_values <- function(bind_values) { #' or `NA`), names(bind_values)[[1]] <- NA bind_values } test_select_bind_expr( arrow = arrow, bind = bind, list(1L, 2L), patch_bind_values = patch_bind_values, bind_error = ".*", requires_names = TRUE ) }, bind_unnamed_param_named_placeholders = function() { #' and vice versa, patch_bind_values <- function(bind_values) { stats::setNames(bind_values, letters[seq_along(bind_values)]) } #' otherwise an error is raised. test_select_bind_expr( arrow = arrow, bind = bind, 1L, patch_bind_values = patch_bind_values, bind_error = ".*", requires_names = FALSE ) }, #' The behavior for mixing placeholders of different types #' (in particular mixing positional and named placeholders) #' is not specified. #' bind_premature_clear = function() { #' Calling `dbBind()` on a result set already cleared by [dbClearResult()] is_premature_clear <- TRUE #' also raises an error. 
test_select_bind_expr( arrow = arrow, bind = bind, 1L, is_premature_clear = is_premature_clear, bind_error = ".*" ) }, bind_multi_row = function() { #' @section Specification: #' The elements of the `params` argument do not need to be scalars, #' vectors of arbitrary length test_select_bind_expr( arrow = arrow, bind = bind, list(1:3) ) }, # bind_multi_row_zero_length = function() { #' (including length 0) test_select_bind_expr( arrow = arrow, bind = bind, list(integer(), integer()), dbitest_version = if (arrow == "query" || bind == "stream") "1.7.99.12" ) #' are supported. # This behavior is tested as part of run_bind_tester$fun #' For queries, calling `dbFetch()` binding such parameters returns #' concatenated results, equivalent to binding and fetching for each set #' of values and connecting via [rbind()]. }, # bind_multi_row_statement = if (arrow != "query") function() { # This behavior is tested as part of run_bind_tester$fun #' For data manipulation statements, `dbGetRowsAffected()` returns the #' total number of rows affected if binding non-scalar parameters. 
test_select_bind_expr( arrow = arrow, bind = bind, list(1:3), query = FALSE ) }, # bind_repeated = function() { #' `dbBind()` also accepts repeated calls on the same result set is_repeated <- TRUE #' for both queries test_select_bind_expr( arrow = arrow, bind = bind, 1L, is_repeated = is_repeated ) }, # bind_repeated_statement = if (arrow != "query") function() { is_repeated <- TRUE #' and data manipulation statements, test_select_bind_expr( arrow = arrow, bind = bind, 1L, is_repeated = is_repeated, query = FALSE ) }, # bind_repeated_untouched = function() { #' even if no results are fetched between calls to `dbBind()`, is_repeated <- TRUE is_untouched <- TRUE #' for both queries test_select_bind_expr( arrow = arrow, bind = bind, 1L, is_repeated = is_repeated, is_untouched = is_untouched ) }, # bind_repeated_untouched_statement = if (arrow != "query") function() { is_repeated <- TRUE is_untouched <- TRUE #' and data manipulation statements. test_select_bind_expr( arrow = arrow, bind = bind, 1L, is_repeated = is_repeated, is_untouched = is_untouched, query = FALSE ) }, #' bind_named_param_shuffle = function() { #' If the placeholders in the query are named, patch_bind_values <- function(bind_values) { #' their order in the `params` argument is not important. 
bind_values[c(3, 1, 2, 4)] } test_select_bind_expr( arrow = arrow, bind = bind, c(1:3 + 0.5, NA), patch_bind_values = patch_bind_values, requires_names = TRUE ) }, #' bind_integer = function() { #' At least the following data types are accepted on input (including [NA]): #' - [integer] test_select_bind_expr( arrow = arrow, bind = bind, c(1:3, NA) ) }, bind_numeric = function() { #' - [numeric] test_select_bind_expr( arrow = arrow, bind = bind, c(1:3 + 0.5, NA) ) }, bind_logical = function() { #' - [logical] for Boolean values test_select_bind_expr( arrow = arrow, bind = bind, c(TRUE, FALSE, NA) ) }, bind_character = function() { #' - [character] test_select_bind_expr( arrow = arrow, bind = bind, c(get_texts(), NA) ) }, bind_character_escape = function() { #' (also with special characters such as spaces, newlines, quotes, and backslashes) test_select_bind_expr( arrow = arrow, bind = bind, c(" ", "\n", "\r", "\b", "'", '"', "[", "]", "\\", NA) ) }, bind_factor = function() { #' - [factor] (bound as character, #' with warning) test_select_bind_expr( arrow = arrow, bind = bind, map(c(get_texts(), NA_character_), factor), warn = if (bind == "df") TRUE, dbitest_version = if (arrow == "query" && bind == "df") "1.7.99.13" ) }, bind_date = function() { #' - [Date] test_select_bind_expr( arrow = arrow, bind = bind, c(as.Date("2023-12-17") + 0:2, NA), skip_fun = function() !isTRUE(ctx$tweaks$date_typed) ) }, bind_date_integer = function() { #' (also when stored internally as integer) test_select_bind_expr( arrow = arrow, bind = bind, structure(c(18618:18620, NA), class = "Date"), skip_fun = function() !isTRUE(ctx$tweaks$date_typed) ) }, bind_timestamp = function() { #' - [POSIXct] timestamps data_in <- as.POSIXct(c( "2023-12-17 02:40:22", "2023-12-17 02:40:23", "2023-12-17 02:40:24", NA )) test_select_bind_expr( arrow = arrow, bind = bind, data_in, skip_fun = function() !isTRUE(ctx$tweaks$timestamp_typed) ) }, bind_timestamp_lt = function() { #' - [POSIXlt] timestamps data_in 
<- list( as.POSIXlt(as.POSIXct("2023-12-17 02:40:49")), as.POSIXlt(as.POSIXct("2023-12-17 02:40:50")), as.POSIXlt(as.POSIXct("2023-12-17 02:40:51")), as.POSIXlt(NA_character_) ) test_select_bind_expr( arrow = arrow, bind = bind, data_in, skip_fun = function() !isTRUE(ctx$tweaks$timestamp_typed) ) }, bind_time_seconds = function() { #' - [difftime] values data_in <- as.difftime(as.numeric(c(1:3, NA)), units = "secs") test_select_bind_expr( arrow = arrow, bind = bind, data_in, skip_fun = function() !isTRUE(ctx$tweaks$time_typed) ) }, bind_time_hours = function() { #' (also with units other than seconds data_in <- as.difftime(as.numeric(c(1:3, NA)), units = "hours") test_select_bind_expr( arrow = arrow, bind = bind, data_in, skip_fun = function() !isTRUE(ctx$tweaks$time_typed) ) }, bind_time_minutes_integer = function() { #' and with the value stored as integer) data_in <- as.difftime(c(1:3, NA), units = "mins") test_select_bind_expr( arrow = arrow, bind = bind, data_in, skip_fun = function() !isTRUE(ctx$tweaks$time_typed) ) }, bind_raw = if (bind == "df") function() { #' - lists of [raw] for blobs (with `NULL` entries for SQL NULL values) test_select_bind_expr( arrow = arrow, bind = bind, list(list(as.raw(1:10)), list(raw(3)), list(NULL)), skip_fun = function() isTRUE(ctx$tweaks$omit_blob_tests), dbitest_version = if (arrow == "query" && bind == "df") "1.7.99.14", cast_fun = ctx$tweaks$blob_cast ) }, bind_blob = function() { #' - objects of type [blob::blob] test_select_bind_expr( arrow = arrow, bind = bind, list(blob::blob(as.raw(1:10)), blob::blob(raw(3)), blob::blob(NULL)), skip_fun = function() isTRUE(ctx$tweaks$omit_blob_tests), cast_fun = ctx$tweaks$blob_cast ) }, # NULL ) infix <- get_bind_arrow_infix(arrow, bind) names(out) <- gsub("^", infix, names(out)) out } get_bind_arrow_infix <- function(arrow, bind) { if (arrow == "none") { if (bind == "df") { "" } else { "stream_" } } else { if (bind == "df") { "arrow_" } else { "arrow_stream_" } } } 
DBItest/R/spec-sql-exists-table.R0000644000176200001440000000653714602020561016234 0ustar liggesusers#' spec_sql_exists_table #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_exists_table <- list( exists_table_formals = function() { # expect_equal(names(formals(dbExistsTable)), c("conn", "name", "...")) }, exists_table_1 = function(ctx, con, table_name = "dbit05") { #' @return #' `dbExistsTable()` returns a logical scalar, `TRUE` if the table or view #' specified by the `name` argument exists, `FALSE` otherwise. expect_false(expect_visible(dbExistsTable(con, table_name))) penguins <- get_penguins(ctx) dbWriteTable(con, table_name, penguins) expect_true(expect_visible(dbExistsTable(con, table_name))) }, # second stage exists_table_2 = function(ctx, con) { # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit05" expect_false(expect_visible(dbExistsTable(con, table_name))) }, #' exists_table_temporary = function(ctx, con, table_name) { #' This includes temporary tables if supported by the database. expect_false(expect_visible(dbExistsTable(con, table_name))) if (isTRUE(ctx$tweaks$temporary_tables)) { dbWriteTable(con, table_name, data.frame(a = 1L), temporary = TRUE) expect_true(expect_visible(dbExistsTable(con, table_name))) } }, #' exists_table_closed_connection = function(ctx, closed_con) { #' @section Failure modes: #' An error is raised when calling this method for a closed expect_error(dbExistsTable(closed_con, "test")) }, exists_table_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbExistsTable(invalid_con, "test")) }, exists_table_error = function(con, table_name) { #' An error is also raised dbWriteTable(con, table_name, data.frame(a = 1L)) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbExistsTable(con, NA)) #' if this results in a non-scalar. 
expect_error(dbExistsTable(con, c(table_name, table_name))) }, exists_table_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) expect_false(dbExistsTable(con, table_name)) test_in <- data.frame(a = 1L) dbWriteTable(con, table_name, test_in) #' - If an unquoted table name as string: `dbExistsTable()` will do the #' quoting, expect_true(dbExistsTable(con, table_name)) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done expect_true(dbExistsTable(con, dbQuoteIdentifier(con, table_name))) } }, #' exists_table_list = function(con, table_name) { #' For all tables listed by [dbListTables()], `dbExistsTable()` returns `TRUE`. dbWriteTable(con, table_name, data.frame(a = 1)) for (table_name in dbListTables(con)) { eval(bquote(expect_true(dbExistsTable(con, .(table_name))))) } }, # NULL ) DBItest/R/test-all.R0000644000176200001440000000547214602020561013625 0ustar liggesusers#' Run all tests #' #' `test_all()` calls all tests defined in this package (see the section #' "Tests" below). This function supports running only one test by setting an #' environment variable, e.g., set the `DBITEST_ONLY_RESULT` to a nonempty #' value to run only `test_result()`. #' #' Internally `^` and `$` are used as prefix and suffix around the #' regular expressions passed in the `skip` and `run_only` arguments. #' #' @section Tests: #' This function runs the following tests, except the stress tests: #' #' @param skip `[character()]`\cr A vector of regular expressions to match #' against test names; skip test if matching any. 
#' The regular expressions are matched against the entire test name #' minus a possible suffix `_N` where `N` is a number. #' For example, `skip = "exists_table"` will skip both #' `"exists_table_1"` and `"exists_table_2"`. #' @param run_only `[character()]`\cr A vector of regular expressions to match #' against test names; run only these tests. #' The regular expressions are matched against the entire test name. #' @param ctx `[DBItest_context]`\cr A test context as created by #' [make_context()]. #' #' @export test_all <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { run_all <- length(grep("^DBITEST_ONLY_", names(Sys.getenv()))) == 0 if (run_all || Sys.getenv("DBITEST_ONLY_GETTING_STARTED") != "") test_getting_started(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_DRIVER") != "") test_driver(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_CONNECTION") != "") test_connection(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_RESULT") != "") test_result(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_SQL") != "") test_sql(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_META") != "") test_meta(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_TRANSACTION") != "") test_transaction(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_ARROW") != "") test_arrow(skip = skip, run_only = run_only, ctx = ctx) if (run_all || Sys.getenv("DBITEST_ONLY_COMPLIANCE") != "") test_compliance(skip = skip, run_only = run_only, ctx = ctx) # stress tests are not tested by default (#92) invisible() } #' @rdname test_all #' @description `test_some()` allows testing one or more tests. #' @param test `[character]`\cr #' A character vector of regular expressions #' describing the tests to run. 
#' The regular expressions are matched against the entire test name. #' @export test_some <- function(test, ctx = get_default_context()) { test_all(run_only = test, skip = character(), ctx = ctx) invisible() } DBItest/R/spec-arrow-read-table-arrow.R0000644000176200001440000000675014602020561017310 0ustar liggesusers#' spec_arrow_read_table_arrow #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_read_table_arrow <- list( arrow_read_table_arrow_formals = function() { # expect_equal(names(formals(dbReadTableArrow)), c("conn", "name", "...")) }, arrow_read_table_arrow = function(ctx, con, table_name) { # Failed on duckdb skip_if_not_dbitest(ctx, "1.7.99.2") #' @return #' `dbReadTableArrow()` returns an Arrow object that contains the complete data #' from the remote table, effectively the result of calling [dbGetQueryArrow()] with #' `SELECT * FROM `. penguins_in <- get_penguins(ctx) dbWriteTable(con, table_name, penguins_in) penguins_out <- check_arrow(dbReadTableArrow(con, table_name)) expect_equal_df(penguins_out, penguins_in) }, #' arrow_read_table_arrow_missing = function(con, table_name) { #' @section Failure modes: #' An error is raised if the table does not exist. expect_error(dbReadTableArrow(con, table_name)) }, arrow_read_table_arrow_empty = function(ctx, con, table_name) { skip_if_not_dbitest(ctx, "1.8.0.14") #' @return #' An empty table is returned as an Arrow object with zero rows. 
penguins_in <- get_penguins(ctx)[integer(), ] dbWriteTable(con, table_name, penguins_in) penguins_out <- check_arrow(dbReadTableArrow(con, table_name)) expect_equal(nrow(penguins_out), 0L) expect_equal_df(penguins_out, penguins_in) }, #' arrow_read_table_arrow_closed_connection = function(ctx, con, table_name) { #' @section Failure modes: #' An error is raised when calling this method for a closed dbWriteTable(con, table_name, data.frame(a = 1.5)) con2 <- local_closed_connection(ctx = ctx) expect_error(dbReadTableArrow(con2, table_name)) }, arrow_read_table_arrow_invalid_connection = function(ctx, con, table_name) { #' or invalid connection. dbWriteTable(con, table_name, data.frame(a = 1.5)) con2 <- local_invalid_connection(ctx) expect_error(dbReadTableArrow(con2, table_name)) }, arrow_read_table_arrow_error = function(ctx, con, table_name) { #' An error is raised dbWriteTable(con, table_name, data.frame(a = 1.5)) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbReadTableArrow(con, NA)) #' if this results in a non-scalar. 
expect_error(dbReadTableArrow(con, c(table_name, table_name))) }, arrow_read_table_arrow_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { local_remove_test_table(con, table_name) test_in <- data.frame(a = 1.5) dbWriteTable(con, table_name, test_in) #' - If an unquoted table name as string: `dbReadTableArrow()` will do the #' quoting, test_out <- check_arrow(dbReadTableArrow(con, table_name)) expect_equal_df(test_out, test_in) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done test_out <- check_arrow(dbReadTableArrow(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in) } }, # NULL ) DBItest/R/test-transaction.R0000644000176200001440000000063614602017371015404 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_transaction()]: #' Test transaction functions NULL #' Test transaction functions #' #' @inheritParams test_all #' @include test-meta.R #' @family tests #' @export test_transaction <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "Transactions" run_tests(ctx, spec_transaction, skip, run_only, test_suite) } DBItest/R/spec-connection-get-info.R0000644000176200001440000000216014602017371016671 0ustar liggesusers#' spec_connection_get_info #' @family connection specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @rdname spec_get_info spec_connection_get_info <- list( get_info_connection = function(con) { #' @return #' For objects of class [DBIConnection-class], `dbGetInfo()` info <- dbGetInfo(con) #' returns a named list expect_type(info, "list") info_names <- names(info) #' that contains at 
least the following components: #' necessary_names <- c( #' - `db.version`: version of the database server, "db.version", #' - `dbname`: database name, "dbname", #' - `username`: username to connect to the database, "username", #' - `host`: hostname of the database server, "host", #' - `port`: port on the database server. "port" ) for (name in necessary_names) { eval(bquote( expect_true(.(name) %in% info_names) )) } #' It must not contain a `password` component. expect_false("password" %in% info_names) #' Components that are not applicable should be set to `NA`. }, # NULL ) DBItest/R/spec-meta-get-info-result.R0000644000176200001440000000241514602017371016777 0ustar liggesusers#' spec_meta_get_info_result #' @family meta specifications #' @usage NULL #' @format NULL #' @keywords NULL #' @name spec_get_info spec_meta_get_info_result <- list( get_info_result = function(ctx, con) { #' @return #' For objects of class [DBIResult-class], `dbGetInfo()` res <- local_result(dbSendQuery(con, trivial_query())) info <- dbGetInfo(res) #' returns a named list expect_type(info, "list") info_names <- names(info) #' that contains at least the following components: #' necessary_names <- c( #' - `statatment`: the statement used with [dbSendQuery()] or [dbExecute()], #' as returned by [dbGetStatement()], "statement", #' - `row.count`: the number of rows fetched so far (for queries), #' as returned by [dbGetRowCount()], "row.count", #' - `rows.affected`: the number of rows affected (for statements), #' as returned by [dbGetRowsAffected()] "rows.affected", #' - `has.completed`: a logical that indicates #' if the query or statement has completed, #' as returned by [dbHasCompleted()]. 
"has.completed" ) for (name in necessary_names) { eval(bquote( expect_true(.(name) %in% info_names) )) } }, # NULL ) DBItest/R/spec-arrow-create-table-arrow.R0000644000176200001440000002321114602020561017627 0ustar liggesusers#' spec_arrow_create_table_arrow #' @family Arrow specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_arrow_create_table_arrow <- list( arrow_create_table_arrow_formals = function(ctx) { skip_if_not_dbitest(ctx, "1.8.0.13") # expect_equal(names(formals(dbCreateTableArrow)), c("conn", "name", "value", "...", "temporary")) }, arrow_create_table_arrow_return = function(con, table_name) { #' @return #' `dbCreateTableArrow()` returns `TRUE`, invisibly. expect_invisible_true(dbCreateTableArrow(con, table_name, stream_frame(trivial_df()))) }, #' arrow_create_table_arrow_overwrite = function(con, table_name) { #' @section Failure modes: #' If the table exists, an error is raised; the remote table remains unchanged. test_in <- trivial_df() dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) dbAppendTableArrow(con, table_name, test_in %>% stream_frame()) expect_error(dbCreateTableArrow(con, table_name, stream_frame(b = 1L))) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in) }, #' arrow_create_table_arrow_closed_connection = function(ctx, closed_con) { #' An error is raised when calling this method for a closed expect_error(dbCreateTableArrow(closed_con, "test", stream_frame(a = 1))) }, arrow_create_table_arrow_invalid_connection = function(ctx, invalid_con) { #' or invalid connection. expect_error(dbCreateTableArrow(invalid_con, "test", stream_frame(a = 1))) }, arrow_create_table_arrow_error = function(ctx, con, table_name) { #' An error is also raised test_in <- stream_frame(a = 1L) #' if `name` cannot be processed with [dbQuoteIdentifier()] or expect_error(dbCreateTableArrow(con, NA, test_in)) #' if this results in a non-scalar. 
expect_error(dbCreateTableArrow(con, c(table_name, table_name), test_in)) #' Invalid values for the `temporary` argument #' (non-scalars, expect_error(dbCreateTableArrow(con, table_name, test_in, temporary = c(TRUE, FALSE))) #' unsupported data types, expect_error(dbCreateTableArrow(con, table_name, fields = 1L)) expect_error(dbCreateTableArrow(con, table_name, test_in, temporary = 1L)) #' `NA`, expect_error(dbCreateTableArrow(con, table_name, fields = NA)) expect_error(dbCreateTableArrow(con, table_name, test_in, temporary = NA)) #' incompatible values, expect_error(dbCreateTableArrow(con, table_name, test_in, fields = letters)) #' duplicate names) expect_error(dbCreateTableArrow(con, table_name, fields = c(a = "INTEGER", a = "INTEGER"))) #' also raise an error. }, #' @section Additional arguments: #' The following arguments are not part of the `dbCreateTableArrow()` generic #' (to improve compatibility across backends) #' but are part of the DBI specification: #' - `temporary` (default: `FALSE`) #' #' They must be provided as named arguments. #' See the "Specification" and "Value" sections for details on their usage. 
arrow_create_table_arrow_name = function(ctx, con) { #' @section Specification: #' The `name` argument is processed as follows, #' to support databases that allow non-syntactic names for their objects: if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) #' - If an unquoted table name as string: `dbCreateTableArrow()` will do the quoting, dbCreateTableArrow(con, table_name, test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, dbQuoteIdentifier(con, table_name))) expect_equal_df(test_out, test_in[0, , drop = FALSE]) #' perhaps by calling `dbQuoteIdentifier(conn, x = name)` } }, arrow_create_table_arrow_name_quoted = function(ctx, con) { #' - If the result of a call to [dbQuoteIdentifier()]: no more quoting is done skip_if_not_dbitest(ctx, "1.7.2") if (isTRUE(ctx$tweaks$strict_identifier)) { table_names <- "a" } else { table_names <- c("a", "with spaces", "with,comma") } for (table_name in table_names) { test_in <- trivial_df() local_remove_test_table(con, table_name) dbCreateTableArrow(con, dbQuoteIdentifier(con, table_name), test_in %>% stream_frame()) test_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(test_out, test_in[0, , drop = FALSE]) } }, arrow_create_table_arrow_value_df = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.5") #' #' The `value` argument can be: #' - a data frame, table_name <- "act_df" local_remove_test_table(con, table_name) df <- data.frame(a = 1) dbCreateTableArrow(con, table_name, df) expect_equal_df(dbReadTable(con, table_name), data.frame(a = numeric())) }, arrow_create_table_arrow_value_array = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.6") #' - a nanoarrow array table_name <- "act_array" local_remove_test_table(con, table_name) array <- nanoarrow::as_nanoarrow_array(data.frame(a = 1)) dbCreateTableArrow(con, table_name, 
array) expect_equal_df(dbReadTable(con, table_name), data.frame(a = numeric())) }, arrow_create_table_arrow_value_stream = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.7") #' - a nanoarrow array stream table_name <- "act_stream" local_remove_test_table(con, table_name) stream <- stream_frame(a = 1) dbCreateTableArrow(con, table_name, stream) expect_equal(as.data.frame(stream), data.frame(a = 1)) #' (which will still contain the data after the call) expect_equal_df(dbReadTable(con, table_name), data.frame(a = numeric())) }, arrow_create_table_arrow_value_schema = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.8") #' - a nanoarrow schema table_name <- "act_schema" local_remove_test_table(con, table_name) schema <- nanoarrow::infer_nanoarrow_schema(stream_frame(a = 1)) dbCreateTableArrow(con, table_name, schema) expect_equal_df(dbReadTable(con, table_name), data.frame(a = numeric())) }, #' arrow_create_table_arrow_temporary_1 = function(ctx, con, table_name = "dbit03") { skip_if_not_dbitest(ctx, "1.8.0.4") #' If the `temporary` argument is `TRUE`, the table is not available in a #' second connection and is gone after reconnecting. #' Not all backends support this argument. 
if (!isTRUE(ctx$tweaks$temporary_tables)) { skip("tweak: temporary_tables") } penguins <- get_penguins(ctx) dbCreateTableArrow(con, table_name, stream_frame(penguins), temporary = TRUE) penguins_out <- check_df(dbReadTable(con, table_name)) expect_equal_df(penguins_out, penguins[0, , drop = FALSE]) con2 <- local_connection(ctx) expect_error(dbReadTable(con2, table_name)) }, # second stage arrow_create_table_arrow_temporary_2 = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.4") # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit03" expect_error(dbReadTable(con, table_name)) }, arrow_create_table_arrow_visible_in_other_connection_1 = function(ctx, local_con) { skip_if_not_dbitest(ctx, "1.8.0.3") #' A regular, non-temporary table is visible in a second connection, penguins <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit04" dbCreateTableArrow(local_con, table_name, stream_frame(penguins)) penguins_out <- check_df(dbReadTable(local_con, table_name)) expect_equal_df(penguins_out, penguins[0, , drop = FALSE]) con2 <- local_connection(ctx) expect_equal_df(dbReadTable(con2, table_name), penguins[0, , drop = FALSE]) }, # second stage arrow_create_table_arrow_visible_in_other_connection_2 = function(ctx, con) { skip_if_not_dbitest(ctx, "1.8.0.3") penguins <- get_penguins(ctx) # table_name not in formals on purpose: this means that this table won't be # removed at the end of the test table_name <- "dbit04" #' in a pre-existing connection, expect_equal_df(check_df(dbReadTable(con, table_name)), penguins[0, , drop = FALSE]) }, # third stage arrow_create_table_arrow_visible_in_other_connection_3 = function(ctx, local_con, table_name = "dbit04") { skip_if_not_dbitest(ctx, "1.8.0.3") penguins <- get_penguins(ctx) #' and after reconnecting to the database. 
expect_equal_df(check_df(dbReadTable(local_con, table_name)), penguins[0, , drop = FALSE]) }, #' arrow_create_table_arrow_roundtrip_keywords = function(ctx, con) { #' SQL keywords can be used freely in table names, column names, and data. tbl_in <- data.frame( select = "unique", from = "join", where = "order", stringsAsFactors = FALSE ) test_table_roundtrip(con, tbl_in, name = "exists", use_append = TRUE) }, arrow_create_table_arrow_roundtrip_quotes = function(ctx, con) { #' Quotes, commas, and spaces can also be used for table names and column names, #' if the database supports non-syntactic identifiers. if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } table_names <- c( as.character(dbQuoteIdentifier(con, "")), as.character(dbQuoteString(con, "")), "with space", "," ) for (table_name in table_names) { tbl_in <- data.frame(trivial_df(4, table_names)) test_table_roundtrip(con, tbl_in, use_append = TRUE) } }, # NULL ) DBItest/R/spec-sql-unquote-identifier.R0000644000176200001440000001635614602020561017450 0ustar liggesusers#' spec_sql_unquote_identifier #' @family sql specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_sql_unquote_identifier <- list( unquote_identifier_formals = function() { # expect_equal(names(formals(dbUnquoteIdentifier)), c("conn", "x", "...")) }, unquote_identifier_return = function(con) { #' @return #' `dbUnquoteIdentifier()` returns a list of objects simple_in <- dbQuoteIdentifier(con, "simple") simple_out <- dbUnquoteIdentifier(con, simple_in) expect_type(simple_out, "list") }, # unquote_identifier_vectorized = function(ctx, con) { #' of the same length as the input. 
simple_in <- dbQuoteIdentifier(con, "simple") simple_out <- dbUnquoteIdentifier(con, simple_in) expect_equal(length(simple_out), 1L) letters_in <- dbQuoteIdentifier(con, letters) letters_out <- dbUnquoteIdentifier(con, letters_in) expect_equal(length(letters_out), length(letters_in)) #' For an empty vector, this function returns a length-0 object. empty <- character() empty_in <- dbQuoteIdentifier(con, empty) empty_out <- dbUnquoteIdentifier(con, empty_in) expect_equal(length(empty_out), 0) empty_in <- character() empty_out <- dbUnquoteIdentifier(con, empty_in) expect_equal(length(empty_out), 0) #' The names of the input argument are preserved in the output. unnamed_in <- dbQuoteIdentifier(con, letters) unnamed_out <- dbUnquoteIdentifier(con, unnamed_in) expect_null(names(unnamed_out)) named_in <- dbQuoteIdentifier(con, stats::setNames(LETTERS[1:3], letters[1:3])) named_out <- dbUnquoteIdentifier(con, named_in) expect_equal(names(named_out), letters[1:3]) #' If `x` is a value returned by `dbUnquoteIdentifier()`, #' calling `dbUnquoteIdentifier(..., dbQuoteIdentifier(..., x))` #' returns `list(x)`. expect_identical(dbUnquoteIdentifier(con, simple_out[[1]]), simple_out) expect_identical(dbUnquoteIdentifier(con, letters_out[[1]]), letters_out[1]) #' If `x` is an object of class [Id], #' calling `dbUnquoteIdentifier(..., x)` returns `list(x)`. expect_identical(dbUnquoteIdentifier(con, Id(table = "simple")), list(Id(table = "simple"))) #' (For backends it may be most convenient to return [Id] objects #' to achieve this behavior, but this is not required.) }, #' unquote_identifier_plain = function(ctx, con) { skip_if_not_dbitest(ctx, "1.7.99.15") #' Plain character vectors can also be passed to `dbUnquoteIdentifier()`. 
expect_identical(dbUnquoteIdentifier(con, "a"), list(Id("a"))) expect_identical(dbUnquoteIdentifier(con, "a.b"), list(Id("a", "b"))) expect_identical(dbUnquoteIdentifier(con, "a.b.c"), list(Id("a", "b", "c"))) expect_identical(dbUnquoteIdentifier(con, "a.b.c.d"), list(Id("a", "b", "c", "d"))) }, #' unquote_identifier_error = function(con) { #' @section Failure modes: #' #' An error is raised if a character vectors with a missing value is passed #' as the `x` argument. expect_error(dbUnquoteIdentifier(con, NA_character_)) expect_error(dbUnquoteIdentifier(con, c("a", NA_character_))) }, unquote_identifier_roundtrip = function(con) { #' @section Specification: #' For any character vector of length one, quoting (with [dbQuoteIdentifier()]) #' then unquoting then quoting the first element is identical to just quoting. simple_in <- dbQuoteIdentifier(con, "simple") simple_out <- dbUnquoteIdentifier(con, simple_in) simple_roundtrip <- dbQuoteIdentifier(con, simple_out[[1]]) expect_identical(simple_in, simple_roundtrip) }, # unquote_identifier_special = function(ctx, con) { #' This is also true for strings that #' contain special characters such as a space, with_space_in <- dbQuoteIdentifier(con, "with space") with_space_out <- dbUnquoteIdentifier(con, with_space_in) with_space_roundtrip <- dbQuoteIdentifier(con, with_space_out[[1]]) #' a dot, with_dot_in <- dbQuoteIdentifier(con, "with.dot") with_dot_out <- dbUnquoteIdentifier(con, with_dot_in) with_dot_roundtrip <- dbQuoteIdentifier(con, with_dot_out[[1]]) #' a comma, with_comma_in <- dbQuoteIdentifier(con, "with,comma") with_comma_out <- dbUnquoteIdentifier(con, with_comma_in) with_comma_roundtrip <- dbQuoteIdentifier(con, with_comma_out[[1]]) #' or quotes used to mark strings with_quote_in <- dbQuoteIdentifier(con, as.character(dbQuoteString(con, "a"))) with_quote_out <- dbUnquoteIdentifier(con, with_quote_in) with_quote_roundtrip <- dbQuoteIdentifier(con, with_quote_out[[1]]) #' or identifiers, quoted_with_space_in <- 
dbQuoteIdentifier(con, as.character(with_space_in)) quoted_with_space_out <- dbUnquoteIdentifier(con, quoted_with_space_in) quoted_with_space_roundtrip <- dbQuoteIdentifier(con, quoted_with_space_out[[1]]) quoted_with_dot_in <- dbQuoteIdentifier(con, as.character(with_dot_in)) quoted_with_dot_out <- dbUnquoteIdentifier(con, quoted_with_dot_in) quoted_with_dot_roundtrip <- dbQuoteIdentifier(con, quoted_with_dot_out[[1]]) quoted_with_comma_in <- dbQuoteIdentifier(con, as.character(with_comma_in)) quoted_with_comma_out <- dbUnquoteIdentifier(con, quoted_with_comma_in) quoted_with_comma_roundtrip <- dbQuoteIdentifier(con, quoted_with_comma_out[[1]]) quoted_with_quote_in <- dbQuoteIdentifier(con, as.character(with_quote_in)) quoted_with_quote_out <- dbUnquoteIdentifier(con, quoted_with_quote_in) quoted_with_quote_roundtrip <- dbQuoteIdentifier(con, quoted_with_quote_out[[1]]) #' if the database supports this. if (isTRUE(ctx$tweaks$strict_identifier)) { skip("tweak: strict_identifier") } expect_identical(with_space_in, with_space_roundtrip) expect_identical(with_dot_in, with_dot_roundtrip) expect_identical(with_comma_in, with_comma_roundtrip) expect_identical(with_quote_in, with_quote_roundtrip) expect_identical(quoted_with_space_in, quoted_with_space_roundtrip) expect_identical(quoted_with_dot_in, quoted_with_dot_roundtrip) expect_identical(quoted_with_comma_in, quoted_with_comma_roundtrip) expect_identical(quoted_with_quote_in, quoted_with_quote_roundtrip) }, #' unquote_identifier_simple = function(con) { #' Unquoting simple strings (consisting of only letters) wrapped with [SQL()] and #' then quoting via [dbQuoteIdentifier()] gives the same result as just #' quoting the string. 
simple_in <- "simple" simple_quoted <- dbQuoteIdentifier(con, simple_in) simple_out <- dbUnquoteIdentifier(con, SQL(simple_in)) simple_roundtrip <- dbQuoteIdentifier(con, simple_out[[1]]) expect_identical(simple_roundtrip, simple_quoted) }, unquote_identifier_table_schema = function(ctx, con) { #' Similarly, unquoting expressions of the form `SQL("schema.table")` #' and then quoting gives the same result as quoting the identifier #' constructed by `Id("schema", "table")`. schema_in <- "schema" table_in <- "table" simple_quoted <- dbQuoteIdentifier(con, Id(schema_in, table_in)) simple_out <- dbUnquoteIdentifier(con, SQL(paste0(schema_in, ".", table_in))) simple_roundtrip <- dbQuoteIdentifier(con, simple_out[[1]]) expect_identical(simple_roundtrip, simple_quoted) }, # NULL ) DBItest/R/spec-driver-connect.R0000644000176200001440000000606114602017371015752 0ustar liggesusers#' spec_driver_connect #' @family driver specifications #' @usage NULL #' @format NULL #' @keywords NULL spec_driver_connect <- list( connect_formals = function() { # expect_equal(names(formals(dbConnect)), c("drv", "...")) }, connect_can_connect = function(ctx) { #' @return con <- expect_visible(connect(ctx)) #' `dbConnect()` returns an S4 object that inherits from [DBIConnection-class]. expect_s4_class(con, "DBIConnection") dbDisconnect(con) #' This object is used to communicate with the database engine. }, # #' connect_format = function(con) { #' A [format()] method is defined for the connection object. desc <- format(con) #' It returns a string that consists of a single line of text. 
expect_type(desc, "character") expect_length(desc, 1) expect_false(grepl("\n", desc, fixed = TRUE)) }, connect_bigint_integer = function(ctx) { #' @section Specification: #' DBI recommends using the following argument names for authentication #' parameters, with `NULL` default: #' - `user` for the user name (default: current user) #' - `password` for the password #' - `host` for the host name (default: local connection) #' - `port` for the port number (default: local connection) #' - `dbname` for the name of the database on the host, or the database file #' name #' #' The defaults should provide reasonable behavior, in particular a #' local connection for `host = NULL`. For some DBMS (e.g., PostgreSQL), #' this is different to a TCP/IP connection to `localhost`. #' #' In addition, DBI supports the `bigint` argument that governs how #' 64-bit integer data is returned. The following values are supported: #' - `"integer"`: always return as `integer`, silently overflow con <- local_connection(ctx, bigint = "integer") res <- dbGetQuery(con, "SELECT 10000000000") expect_type(res[[1]], "integer") }, # connect_bigint_numeric = function(ctx) { #' - `"numeric"`: always return as `numeric`, silently round con <- local_connection(ctx, bigint = "numeric") res <- dbGetQuery(con, "SELECT 10000000000") expect_type(res[[1]], "double") expect_equal(res[[1]], 1e10) }, # connect_bigint_character = function(ctx) { #' - `"character"`: always return the decimal representation as `character` con <- local_connection(ctx, bigint = "character") res <- dbGetQuery(con, "SELECT 10000000000") expect_type(res[[1]], "character") expect_equal(res[[1]], "10000000000") }, # connect_bigint_integer64 = function(ctx) { #' - `"integer64"`: return as a data type that can be coerced using #' [as.integer()] (with warning on overflow), [as.numeric()] #' and [as.character()] con <- local_connection(ctx, bigint = "integer64") res <- dbGetQuery(con, "SELECT 10000000000") 
expect_warning(expect_true(is.na(as.integer(res[[1]])))) expect_equal(as.numeric(res[[1]]), 1e10) expect_equal(as.character(res[[1]]), "10000000000") }, # NULL ) DBItest/R/test-sql.R0000644000176200001440000000055314602017371013654 0ustar liggesusers#' @name test_all #' @aliases NULL #' @section Tests: #' [test_sql()]: #' Test SQL methods NULL #' Test SQL methods #' #' @inheritParams test_all #' @include test-result.R #' @family tests #' @export test_sql <- function(skip = NULL, run_only = NULL, ctx = get_default_context()) { test_suite <- "SQL" run_tests(ctx, spec_sql, skip, run_only, test_suite) } DBItest/vignettes/0000755000176200001440000000000014725060475013572 5ustar liggesusersDBItest/vignettes/DBItest.Rmd0000644000176200001440000001621514602020561015523 0ustar liggesusers--- title: "Testing DBI backends" author: "Kirill Müller" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Testing DBI backends} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r setup, include = FALSE} knitr::opts_chunk$set(error = (getRversion() < "3.5")) ``` This document shows how to use the DBItest package when implementing a new DBI backend or when applying it to an existing backend. The DBItest package provides a large collection of automated tests. ## Testing a new backend The test cases in the DBItest package are structured very similarly to the sections in the "backend" vignette: ```r vignette("backend", package = "DBI") ``` Like the "backend" vignette, this vignette assumes that you are implementing the `RKazam` package that has a `Kazam()` function that creates a new `DBIDriver` instance for connecting to a "Kazam" database. You can add the tests in the DBItest package incrementally, as you proceed with implementing the various parts of the DBI. The DBItest package builds upon the testthat package. 
To enable it, run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_getting_started() ``` Now test your package with `devtools::test()`. If you followed at least the "Getting started" section of the DBI "backend" vignette, all tests should succeed. By adding the corresponding test function to your `tests/test-DBItest.R` file *before* implementing a section, you get immediate feedback which functionality of this section still needs to be implemented by running `devtools::test()` again. Therefore, proceed by appending the following to `tests/test-DBItest.R`, to include a test case for the forthcoming section: ```r DBItest::test_driver() ``` Again, all tests should succeed when you are done with the "Driver" section. Add the call to the next tester function, implement the following section until all tests succeed, and so forth. In this scenario, you are usually interested only in the first error the test suite finds. The `StopReporter` of `testthat` is most helpful here, activate it by passing `reporter = "stop"` to `devtools::test()`. Alternatively, call the relevant `DBItest::test_()` function directly. The tests are documented with the corresponding functions: For instance, `?test_driver` shows a coarse description of all tests for the "Driver" test case. Test failures will include the name of the test that is failing; in this case, investigating the documentation or the source code of the DBItest package will usually lead to the cause of the error. 
Not all tests can be satisfied: For example, there is one test that tests that `logical` variables survive a write-read roundtrip to the database, whereas another test tests that `logical` variables are converted to `integer` in such a case. Tests can be skipped by adding regular expressions for the tests to skip as character vector to the call, as in the following[^termnull]: [^termnull]: The terminating `NULL` allows appending new lines to the end by copy-pasting an existing line, without having to take care of the terminating comma. ```r DBItest::test_driver(skip = c( "data_type" # Reason 1... "constructor.*", # Reason 2... NULL )) ``` Some other reasons to skip tests are: - your database does not support a feature - you want to postpone or avoid the implementation of a feature - the test takes too long to run. ## Testing an existing backend For an existing backends, simply enabling all tests may be the quickest way to get started. Run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_all() ``` The notes about "Kazam" and skipping tests from the previous section apply here as well. The `test_all()` function simply calls all test cases. ## External testing DBItest is currently geared towards usage as part of a package's test suite. With some effort it is possible to test a database backend against a custom database. This can help verify that your database installation gives expected results when accessed with DBI with specific connection arguments. The example below shows how to run tests with the RSQLite backend. ### Preparation First, we need to define a test context. 
It contains: - a connector that describes how to establish the database connection, see ``?DBI::`DBIConnector-class` `` for details, - tweaks, see `?tweaks`, - tests skipped by default, as a character vector. Database backends that use DBItest for testing usually have a file `test/testthat/helper-DBItest.R` or `test/testthat/test-DBItest.R` where a call to `make_context()` can be found. The help for `make_context()` already contains an example that works for RSQLite. Adapt it to your needs. The `make_context()` function must be called before any tests can run. ```{r make-context, error = !rlang::is_installed("RSQLite")} library(DBItest) tweaks <- tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ) default_skip <- c("roundtrip_date", "roundtrip_timestamp") invisible(make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks, default_skip = default_skip )) ``` ### Testing Use `test_all()` to run all tests, and `test_some()` to run a specific test that failed previously. The `test_*` functions need to be run with a testthat reporter to avoid stopping at the first error or warning. For interactive use, the "progress" reporter gives good results. In the example below, the "location" and "stop" reporters are combined. Review `?testthat::Reporter` for a list of reporters. ```{r simple, error = !rlang::is_installed("RSQLite")} DBItest::test_some("get_query_atomic") ``` DBItest relies heavily on metaprogramming. Unfortunately, this means that a failing test may give no indication of the reason for the failure. 
The `test_some()` function now by default integrates the new experimental [dblog package](https://github.com/r-dbi/dblog) package. It prints the DBI code that is executed as the tests are run, as seen above. Another way to scout for the reason of the problem is to review the sources of DBItest and relate the test name (that is printed with each failure) with the human-readable specification embedded with the test code. ```{r location, error = !rlang::is_installed("RSQLite")} testthat::with_reporter( c("location", "fail"), DBItest::test_some("get_query_atomic") ) ``` DBItest/NAMESPACE0000644000176200001440000000167714602020561012776 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("$",DBItest_tweaks) S3method(format,DBItest_tweaks) S3method(print,DBItest_tweaks) export(get_default_context) export(make_context) export(set_default_context) export(test_all) export(test_arrow) export(test_compliance) export(test_connection) export(test_driver) export(test_getting_started) export(test_meta) export(test_result) export(test_some) export(test_sql) export(test_stress) export(test_transaction) export(tweaks) import(DBI) import(rlang) import(testthat, except = c(is_null, is_false, is_true)) importFrom(callr,r) importFrom(lubridate,with_tz) importFrom(magrittr,"%>%") importFrom(methods,extends) importFrom(methods,findMethod) importFrom(methods,getClass) importFrom(methods,getClasses) importFrom(methods,hasMethod) importFrom(methods,is) importFrom(methods,new) importFrom(stats,setNames) importFrom(utils,head) importFrom(withr,with_output_sink) importFrom(withr,with_temp_libpaths) DBItest/NEWS.md0000644000176200001440000005722014725060265012663 0ustar liggesusers # DBItest 1.8.2 (2024-12-07) ## Chore - More explicit message on constructive errors. - Bump DBI dependency. ## Documentation - Set BS version explicitly for now (@maelle, #370). - Add package to `\link{}` targets. ## Testing - Adapt tests to updates of the constructive package. 
# DBItest 1.8.1 (2024-03-31) ## Features - Allow multiple warnings in disconnect tests (#363). - Fix specification for Arrow tests (#357). - Show DBItest function in backtrace (#349, #354). - Specify `value` argument for `dbCreateTable()` and `dbCreateTableArrow()` (#345). - Enable two tests for `dbGetQueryArrow()` (#342). - Relax `dbListObjects()` spec (#339, #341). ## Chore - Avoid dplyr (#364). - Remove `.dots` argument to `test_select_with_null()` (#362). - Prefer `map()` over `lapply()` (#361) and `map_*()` over `vapply()` (#356). - Bump DBI dependency to fix tests (#359). - Document sources for autogenerated files (#353), add comments to generated code (#358). - Make test names unique, with a numeric suffix (#355). - Align with RSQLite (#351). - Replace unconditional skip with versioned skip (#347). - Consistent use of `skip_if_not_dbitest()` (#346). ## Continuous integration - Modernize backends checks. ## Documentation - Use dbitemplate (@maelle, #360). - Mention `dbBindArrow()` in documentation (#350). - Minor specification fixes (#344). # DBItest 1.8.0 (2023-12-21) ## Bug fixes - Fix `create_roundtrip_keywords` and `create_roundtrip_quotes` tests (#283). ## Features - Relax specification of `dbUnquoteIdentifier()`, character vectors are now allowed too. - Specify `dbFetchChunk()` (#331), `dbFetchArrowChunk()` (#301) and `dbBindArrow()` (#328). - Inline all tests for `dbBind()` (#326). - Require support for `dbFetch(n = NA)` (#296, #316). - New `allow_na_rows_affected` tweak to support `NA` values returned from `dbGetRowsAffected()` (#297, #312). - Switch to nanoarrow (#291). - Basic tests for the new `db*Arrow()` interface (#287). - New `skip_if_not_dbitest()` (#289). - `reexport` test uses interface for dev DBI if the backend is compatible with DBItest \> 1.7.3. - Slightly better code generated for `tweaks()` (#313). - Remove interface to dblog in the CRAN version. ## CI/CD - Add adbi to check matrix (#314). - Reenable ODBC MySQL tests (#288). 
- Tweak `read_table_missing` test (#285). ## Chore - Remove rlang qualification (#332). - No longer need `as.data.frame()` twice for Arrow (#302, #330). - Consistent use of `skip_if_not_dbitest()` (#317). - Disable Arrow skips (#303). - Modernize `sql_union()` (#304). - Make better use of `trivial_df()` (#284). ## Documentation - Avoid error if RSQLite is not installed. ## Testing - Run DBItest for SQLite as part of the checks here (#318). - Enable remaining Arrow tests (#307). - Fix checks without suggested packages (#300). # DBItest 1.7.3 (2022-10-18) ## Features - Use and enable compatibility with testthat edition 3 (#263, #268). Complete removal of `expect_is()` (@MichaelChirico, #257). - Adapt to new Arrow DBI generics (#265). - Better stack traces for visibility tests. - `dbQuoteIdentifier()` roundtrip is tested for tables only (@dpprdan, #256). - `test_some()` also tests a test if it would normally be skipped. ## Chore - Bump minimum DBI version to 1.1.3. - Refactor DBI tests in preparation for inlining them. ## Bug fixes - Correct cleanup even if `dbIsValid()` is not implemented. # DBItest 1.7.2 (2021-12-17) ## Features - `tweaks()` gains `dbitest_version` argument to support targeting a specific version of the DBItest package. The default is 1.7.1 (#236). - Reuse database connection for most tests (#245). - New `roundtrip_date_extended`, `roundtrip_timestamp_extended`, `append_roundtrip_date_extended` and `append_roundtrip_timestamp_extended` test dates between 1800 and 2999 (#148, #249). - New `quote_literal_empty` test (#248). - New `bind_character_escape` test for binding special characters (#242). - New `bind_time_minutes_integer` test for integer durations. ## Bug fixes - All column names are specified using lowercase on input, for compatibility with Redshift (#234). - `column_info_consistent` no longer tests mangling of column names (#181). - `spec_sql_append_table` test: Remove bad argument. 
## Documentation - Improve documentation: list `spec_` objects in pkgdown help index, add cross references (#128). - Add specification for `value` argument to `DBI::dbWriteTable()` (#235). ## Internal - Replace internal `with_result()`, `with_remove_test_tables()` and `with_rollback_on_error()` for better error traces (#184, #250, #251, #253). - Use `palmerpenguins::penguins` instead of `iris` (#241). - Fix MySQL ODBC test on GitHub Actions (#237). - Improve testthat 3e compatibility: remove `testthat::expect_is()` and `testthat::expect_that()` from tests (#231, @michaelquinn32). - Decompose query used for testing `dbBind()`. # DBItest 1.7.1 (2021-07-30) ## Features - Many tests now emit simpler stack traces, because the connection is opened by the test driver and not by the test itself (#187). Reduce usage of `with_remove_test_table()` for better stack traces on error (#196). Remove `with_*connection()` (#193). - `test_some()` shows DBI code via dblog (#217) if `dblog = TRUE` (#226). - New `"bind_date_integer"`, `"bind_time_seconds"` and `"bind_time_hours"` tests (#218). - New `create_table_as` tweak (#131). - `"roundtrip_time"` and `"append_roundtrip_time"` tests now also test values of class `"difftime"` with units other than `"secs"` (#199). - All tables created by the tests have the `"dbit"` prefix. Almost all tests now use random table names to avoid collisions and unrelated test failures (#197). - `"roundtrip_timestamp"` tests now accept a time zone set by the database backend (#178, #198). - Support more than one class of each type in DBI backend packages. ## Bug fixes - Fix input dataset in `"overwrite_table_missing"` test (#210, @martinstuder). - Use original test name to decide if a test is skipped (#225). - Fix reexport test: skip if package is not installed, remove checks for deprecated functions and functions not reexported (#203). ## Internal - Requires DBI 1.1.1. - Test odbc as part of the backend tests (#228). 
- Dynamic build matrix for backends (#221). - Compatibility with testthat 3.0.0 (#207). - Switch to GitHub Actions (#201). # DBItest 1.7.0 (2019-12-16) ## Specifications - Specify tests for `dbGetInfo()`. - Specify `immediate` argument (r-dbi/DBI#268). - Specify `dbCreateTable()` and `dbAppendTable()` (#169). - New `unquote_identifier_table_schema` test: Identifiers of the form `table.schema` can be processed with `dbUnquoteIdentifier()`. - Fix `has_completed_statement` test (#176). ## Testing infrastructure - Document how to run tests externally and how to debug tests (#165). - `test_*()` gain new `run_only = NULL` argument that allow restricting the tests to be run with a positive match. `test_some()` uses `run_only` instead of constructing a regular expression with negative lookahead. This helps troubleshooting a single test with `testthat::set_reporter(DebugReporter$new())` . - `make_context()` gains `default_skip` argument and uses the `DBIConnector` class. - Support `NULL` default value in driver constructor (#171). ## Internal - Fulfill CII badge requirements (#179, @TSchiefer). - Use debugme. - Require R 3.2. - Avoid subsetting vectors out of bounds, for consistency with vctrs. # DBItest 1.6.0 (2018-05-03) ## New checks - Now checking that `Id()` is reexported. - Support `temporary` argument in `dbRemoveTable()` (default: `FALSE`) (r-dbi/DBI#141). - Added specification for the behavior in case of duplicate column names (#137). - The `bigint` argument to `dbConnect()` is now specified. Accepts `"integer64"`, `"integer"`, `"numeric"` and `"character"`, large integers are returned as values of that type (#133). - Add specification for partially filled `field.types` argument. - Specify `dbRemoveTable(fail_if_missing = FALSE)` (r-dbi/DBI#197). - Add specification for `dbColumnInfo()` (r-dbi/DBI#75). - Add specification for `dbListFields()` (r-dbi/DBI#75). - Test that named parameters are actually matched by name in `dbBind()`, by shuffling them (#138). 
- Explicitly specify default `row.names = FALSE` for `dbReadTable()` and `dbWriteTable()` (#139). - Add specification for writing 64-bit values, backends must support roundtripping values returned from the database (#146). - Add specification for the `params` argument to `dbGetQuery()`, `dbSendQuery()`, `dbExecute()` and `dbSendStatement()` (#159). - Add test for `dbQuoteIdentifier()`: "The names of the input argument are preserved in the output" (r-lib/DBI#173). - Blob tests now also read and write zero bytes (\x00). - Add string encoded in Latin-1 to the character tests. - Added test for `dbIsValid()` on stale connections. ## Removed checks - Don't test selecting untyped `NULL` anymore. - Full interface compliance doesn't require a method for `dbGetInfo(DBIDriver)` for now. - Remove `"cannot_forget_disconnect"` test that fails on R-devel (#150). - Methods without `db` prefix are not checked for ellipsis in the signature anymore. - Don't specify `Inf` and `NaN` for lack of consistent support across DBMS (#142). ## Updated/corrected checks - Fix query that is supposed to generate a syntax error. - Fix typo (#147, @jonmcalder). - Implement `POSIXlt` bind test correctly. - Improve error detection for `dbBind()`. - Redesign tests for `dbBind()`, now queries of the form `SELECT CASE WHEN (? = ?) AND (? IS NULL) THEN 1.5 ELSE 2.5` are issued. The original tests were inappropriate for RMariaDB, because an untyped placeholder is returned as a blob. - Transaction tests now use `dbWriteTable()` instead of `dbCreateTable()`, because some DBMS don't support transactions for DML. - Fix timestamp tests for RMariaDB. - Fix string constants. - The `"roundtrip_timestamp"` test now correctly handles timezone information. The output timezone is ignored. - Clear result in `spec_meta_get_info_result` (#143). - Use named argument for `n` in `dbGetQuery()` call. - Minor fixes. ## Tweaks - New tweak `blob_cast` allows specifying a conversion function to the BLOB data type. 
- New `is_null_check` tweak that allows specifying a function that is used when checking values for `NULL`. Required for RPostgres. - New `list_temporary_tables` tweak that can be enabled independently of `temporary_tables` to indicate that the DBMS does not support listing temporary tables. ## Infrastructure - Allow running only a subset of tests in `test_all()` by specifying an environment variable. - `test_all()` and `test_some()` return `NULL` invisibly. ## Internals - Compatibility code if `DBI::dbQuoteLiteral()` is unavailable. - New `trivial_query()` replaces many hard-coded queries and uses non-integer values for better compatibility with RMariaDB. - Convert factor to character for iris data (#141). # DBItest 1.5-2 (2018-01-26) - Fix test that fails with "noLD". - Fix NOTEs on R-devel. # DBItest 1.5-1 (2017-12-10) - Remove `"cannot_forget_disconnect"` test that fails on R-devel (#150). # DBItest 1.5 (2017-06-18) Finalize specification. Most tests now come with a corresponding prose, only those where the behavior is not finally decided don't have a prose version yet (#88). New tests --------- - Test behavior of methods in presence of placeholders (#120). - Test column name mismatch behavior for appending tables (#93). - Test that `dbBind()` against factor works but raises a warning (#91). - Test roundtrip of alternating empty and non-empty strings (#42). - Test multiple columns of different types in one statement or table (#35). - Test `field.types` argument to `dbWriteTable()` (#12). - Added tests for invalid or closed connection argument to all methods that expect a connection as first argument (#117). - Enabled test that tests a missing `dbDisconnect()`. - Add test for unambiguous escaping of identifiers (rstats-db/RSQLite#123). - Reenable tests for visibility (#89). - Fix and specify 64-bit roundtrip test. - 64-bit integers only need to be coercible to `numeric` and `character` (#74). - Added roundtrip test for time values (#14). 
- Added tweaks for handling date, time, timestamp, ... (#53, #76). - Test that `dbFetch()` on update-only query returns warning (#66). Adapted tests ------------- - `NULL` is a valid value for the `row.names` argument, same as `FALSE`. - A column named `row_names` receives no special handling (#54). - A warning (not an error anymore) is expected when calling `dbDisconnect()` on a closed or invalid connection. - `row.names = FALSE` is now the default for methods that read or write tables. - Add `NA` to beginning and end of columns in table roundtrip tests (#24). - Stricter tests for confusion of named and unnamed SQL parameters and placeholders (#107). - Also check names of all returned data frames. - The return value for all calls to `dbGetQuery()`, `dbFetch()`, and `dbReadTable()` is now checked for consistency (all columns have the same length, length matches number of rows) (#126). - Removed stress tests that start a new session. - Allow `hms` (or other subclasses of `difftime`) to be returned as time class (#135, @jimhester). - Test that dates are of type `numeric` (#99, @jimhester). - Replace `POSIXlt` by `POSIXct` (#100, @jimhester). - Use `"PST8PDT"` instead of `"PST"` as time zone (#110, @thrasibule). - Added tests for support of `blob` objects (input and output), but backends are not required to return `blob` objects (#98). - The `logical_return`, `date_typed` and `timestamp_typed` tweaks are respected by the bind tests. - Fixed tests involving time comparison; now uses UTC timezone and compares against a `difftime`. - Tests for roundtrip of character values now includes tabs, in addition to many other special characters (#85). - Make sure at least one table exists in the `dbListTables()` test. - Fix roundtrip tests for raw columns: now expecting `NULL` and not `NA` entries for SQL NULL values. - Fix `expect_equal_df()` for list columns. - Testing that a warning is given if the user forgets to call `dbDisconnect()` or `dbClearResult()` (#103). 
- Numeric roundtrip accepts conversion of `NaN` to `NA` (#79). Internal -------- - Fix R CMD check errors. - Internal consistency checks (#114). - Skip patterns that don't match any of the tests now raise a warning (#84). - New `test_some()` to test individual tests (#136). - Use desc instead of devtools (#40). - All unexpected warnings are now reported as test failures (#113). - `DBItest_tweaks` class gains a `$` method, accessing an undefined tweak now raises an error. - The arguments of the `tweaks()` function now have default values that further describe their intended usage. - New `with_closed_connection(ctx = ctx, )`, `with_invalid_connection(ctx = ctx, )`, `with_result()` and `with_remove_test_table()` helpers, and `expect_visible()`, `expect_inbisible_true()`, and `expect_equal_df()` expectations for more concise tests. # DBItest 1.4 (2016-12-02) ## DBI specification - Use markdown in documentation. - Description of parametrized queries and statements (#88). - New hidden `DBIspec-wip` page for work-in-progress documentation. - Get rid of "Format" and "Usage" sections, and aliases, in the specs. ## Tests - Not testing for presence of `max.connections` element in `dbGetInfo(Driver)` (rstats-db/DBI#56). - Test multi-row binding for queries and statements (#96). - New `ellipsis` check that verifies that all implemented DBI methods contain `...` in their formals. This excludes `show()` and all methods defined in this or other packages. - Refactored `bind_` tests to use the new `parameter_pattern` tweak (#95). - Rough draft of transaction tests (#36). - New `fetch_zero_rows` test, split from `fetch_premature_close`. - The "compliance" test tests that the backend package exports exactly one subclass of each DBI virtual class. - Document and enhance test for `dbDataType("DBIDriver", "ANY")` (#88). - Minor corrections for "bind" tests. ## Internal - Isolate stress tests from main test suite (#92). 
- Refactor test specification in smaller modules, isolated from actual test execution (#81). This breaks the documentation of the tests, which will be substituted by a DBI specification in prose. - Align description of binding with code. - Refactor tests for `dbBind()`, test is run by `BindTester` class, and behavior is specified by members and by instances of the new `BindTesterExtra` class. - The `skip` argument to the `test_()` functions is again evaluated with `perl = TRUE` to support negative lookaheads (#33). - Use `dbSendStatement()` and `dbExecute()` where appropriate. - Avoid empty subsections in Rd documentation to satisfy `R CMD check` (#81). # DBItest 1.3 (2016-07-07) Bug fixes --------- - Fix `read_table` test when the backend actually returns the data in a different order. New tests --------- - Test `dbDataType()` on connections (#69, #75, @imanuelcostigan). - Check returned strings for UTF-8 encoding (#72). - Repeated `dbBind()` + `dbFetch()` on the same result set (#51). Features -------- - `tweaks()` gains an `...` as first argument to support future/deprecated tweaks (with a warning), and also to avoid unnamed arguments (#83). - `testthat` now shows a more accurate location for the source of errors, failures, and skips (#78). - Aggregate skipped tests, only one `skip()` call per test function. - Indicate that some tests are optional in documentation (#15). Internal -------- - New `constructor_relax_args` tweak, currently not queried. - The `ctx` argument is now explicit in the test functions. - Change underscores to dashes in file names. - Remove `testthat` compatibility hack. - New `all_have_utf8_or_ascii_encoding()` which vectorizes `has_utf8_or_ascii_encoding()`. - Test on AppVeyor (#73). - Work around regression in R 3.3.0 (fix scheduled for R 3.3.1) which affected stress tests. # DBItest 1.2 (2016-05-21) - Infrastructure - Support names for contexts (@hoesler, #67). 
- The `skip` argument to the test functions is now treated as a Perl regular expression to allow negative lookahead. Use `skip = "(?!test_regex).*"` to choose a single test to run (#33). - Added encoding arguments to non-ASCII string constants (#60, @hoesler). - Improve tests - `simultaneous_connections` test always closes all connections on exit (@hoesler, #68). - More generic compliance check (@hoesler, #61). - Update documentation to reflect test condition (@imanuelcostigan, #70). - `testthat` dependency - Import all of `testthat` to avoid `R CMD check` warnings. - Compatibility with dev version of `testthat` (#62). - Improve Travis builds - Use container-based builds on Travis. - Install `RPostgres` and `RMySQL` from `rstats-db`. - Install `DBI` and `testthat` from GitHub. Version 1.1 (2016-02-12) === - New feature: tweaks - New argument `tweaks` to `make_context()` (#49). - New `tweaks()`, essentially constructs a named list of tweaks but with predefined and documented argument names. - `constructor_name`, respected by the `constructor.*` tests. - `strict_identifier`, if `TRUE` all identifier must be syntactic names even if quoted. The quoting test is now split, and a part is ignored conditional to this tweak. The `roundtrip_quotes` tests also respects this tweak. - `omit_blob_tests` for DBMS that don't have a BLOB data type. - `current_needs_parens` -- some SQL dialects (e.g., BigQuery) require parentheses for the functions `current_date`, `current_time` and `current_timestamp`. - `union`, for specifying a nonstandard way of combining queries. All union queries now name each column in each subquery (required for `bigrquery`). - New tests - `dbGetInfo(Result)` (rstats-db/DBI#55). - `dbListFields()` (#26). - New `package_name` test in `test_getting_started()`. - Improved tests - Stress test now installs package in temporary library (before loading `DBI`) using `R CMD INSTALL` before loading DBI (rstats-db/RSQLite#128, #48). 
- Row count is now tested for equality but not identity, so that backends can return a numeric value > 2^31 at their discretion. - Call `dbRemoveTable()` instead of issuing `DROP` requests, the latter might be unsupported. - Use subqueries in queries that use `WHERE`. - Test that `dbClearResult()` on a closed result set raises a warning. - Expect a warning instead of an error for double disconnect (#50). - Move connection test that requires `dbFetch()` to `test_result()`. - Split `can_connect_and_disconnect` test. - Expect `DBI` to be in `Imports`, not in `Depends`. - Removed tests - Remove test for `dbGetException()` (rstats-db/DBI#51). - Bug fixes - Fix broken tests for quoting. - Self-testing - Test `RPostgres`, `RMySQL`, `RSQLite` and `RKazam` as part of the Travis-CI tests (#52). - Travis CI now installs rstats-db/DBI, updated namespace imports (`dbiCheckCompliance()`, `dbListResults()`). - Use fork of `testthat`. - Utilities - Return test results as named array of logical. Requires hadley/testthat#360, gracefully degrades with the CRAN version. - Internal - Refactored the `get_info_()` tests to use a vector of names. 
- Use versioned dependency for DBI - Use unqualified calls to `dbBind()` again Version 1.0 (2015-12-17) === - CRAN release - Eliminate errors on win-builder - Satisfy R CMD check - Use LGPL-2 license - Add RStudio as copyright holder - Move `devtools` package from "Imports" to "Suggests" Version 0.3 (2015-11-15) === - Feature-complete, ready for review - Tests from the proposal - Add missing methods to compliance check - Add simple read-only test (#27) - Add stress tests for repeated load/unload (with and without connecting) in new R session (#2), - Migrate all tests from existing backends (#28) - Refactor `data_` tests to use a worker function `test_select()` - Test tables with `NA` values above and below the non-`NA` value in `data_` tests - Test return values and error conditions for `dbBind()` and `dbClearResult()` (#31) - Test vectorization of `dbQuoteString()` and `dbQuoteIdentifier()` (#18) - Test that dates have `integer` as underlying data type (#9) - Roundtrip tests sort output table to be sure (#32) - Test `NA` to `NULL` conversion in `dbQuoteString()`, and false friends (#23) - Enhance test for `dbQuoteIdentifier()` (#30) - Style - Avoid using `data.frame()` for date and time columns (#10) - Use `expect_identical()` instead of `expect_equal()` in many places (#13) - Catch all errors in `on.exit()` handlers via `expect_error()` (#20). 
- Combine "meta" tests into new `test_meta()` (#37) - Documentation - New "test" vignette (#16) - Add package documentation (#38) - Same as 0.2-5 Version 0.2 (2015-11-11) === - Tests from the proposal - SQL - Metadata - DBI compliance (not testing read-only yet) - Migrate most of the tests from RMySQL - Test improvements - Test BLOB data type (#17) - Check actual availability of type returned by `dbDataType()` (#19) - Testing infrastructure - Disambiguate test names (#21) - Use regex matching for deciding skipped tests, skip regex must match the entire test name - Documentation - Document all tests in each test function using the new inline documentation feature of roxygen2 - Improve documentation for `test_all()`: Tests are listed in new "Tests" section - Add brief instructions to README - Move repository to rstats-db namespace - Same as 0.1-6 Version 0.1 (2015-10-11) === - First GitHub release - Builds successfully on Travis - Testing infrastructure - Test context - Skipped tests call `skip()` - Function `test_all()` that runs all tests - Tests from the proposal - Getting started - Driver - Connection - Results - Code formatting is checked with lintr - Same as 0.0-5 DBItest/inst/0000755000176200001440000000000014725060475012537 5ustar liggesusersDBItest/inst/doc/0000755000176200001440000000000014725060475013304 5ustar liggesusersDBItest/inst/doc/DBItest.Rmd0000644000176200001440000001621514602020561015235 0ustar liggesusers--- title: "Testing DBI backends" author: "Kirill Müller" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Testing DBI backends} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r setup, include = FALSE} knitr::opts_chunk$set(error = (getRversion() < "3.5")) ``` This document shows how to use the DBItest package when implementing a new DBI backend or when applying it to an existing backend. The DBItest package provides a large collection of automated tests. 
## Testing a new backend The test cases in the DBItest package are structured very similarly to the sections in the "backend" vignette: ```r vignette("backend", package = "DBI") ``` Like the "backend" vignette, this vignette assumes that you are implementing the `RKazam` package that has a `Kazam()` function that creates a new `DBIDriver` instance for connecting to a "Kazam" database. You can add the tests in the DBItest package incrementally, as you proceed with implementing the various parts of the DBI. The DBItest package builds upon the testthat package. To enable it, run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_getting_started() ``` Now test your package with `devtools::test()`. If you followed at least the "Getting started" section of the DBI "backend" vignette, all tests should succeed. By adding the corresponding test function to your `tests/test-DBItest.R` file *before* implementing a section, you get immediate feedback which functionality of this section still needs to be implemented by running `devtools::test()` again. Therefore, proceed by appending the following to `tests/test-DBItest.R`, to include a test case for the forthcoming section: ```r DBItest::test_driver() ``` Again, all tests should succeed when you are done with the "Driver" section. Add the call to the next tester function, implement the following section until all tests succeed, and so forth. In this scenario, you are usually interested only in the first error the test suite finds. The `StopReporter` of `testthat` is most helpful here, activate it by passing `reporter = "stop"` to `devtools::test()`. Alternatively, call the relevant `DBItest::test_()` function directly. 
The tests are documented with the corresponding functions: For instance, `?test_driver` shows a coarse description of all tests for the "Driver" test case. Test failures will include the name of the test that is failing; in this case, investigating the documentation or the source code of the DBItest package will usually lead to the cause of the error. Not all tests can be satisfied: For example, there is one test that tests that `logical` variables survive a write-read roundtrip to the database, whereas another test tests that `logical` variables are converted to `integer` in such a case. Tests can be skipped by adding regular expressions for the tests to skip as character vector to the call, as in the following[^termnull]: [^termnull]: The terminating `NULL` allows appending new lines to the end by copy-pasting an existing line, without having to take care of the terminating comma. ```r DBItest::test_driver(skip = c( "data_type" # Reason 1... "constructor.*", # Reason 2... NULL )) ``` Some other reasons to skip tests are: - your database does not support a feature - you want to postpone or avoid the implementation of a feature - the test takes too long to run. ## Testing an existing backend For an existing backends, simply enabling all tests may be the quickest way to get started. Run the following in your package directory (after installing or updating `devtools`): ```r devtools::use_testthat() devtools::use_test("DBItest") ``` This creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_all() ``` The notes about "Kazam" and skipping tests from the previous section apply here as well. The `test_all()` function simply calls all test cases. ## External testing DBItest is currently geared towards usage as part of a package's test suite. With some effort it is possible to test a database backend against a custom database. 
This can help verify that your database installation gives expected results when accessed with DBI with specific connection arguments. The example below shows how to run tests with the RSQLite backend. ### Preparation First, we need to define a test context. It contains: - a connector that describes how to establish the database connection, see ``?DBI::`DBIConnector-class` `` for details, - tweaks, see `?tweaks`, - tests skipped by default, as a character vector. Database backends that use DBItest for testing usually have a file `test/testthat/helper-DBItest.R` or `test/testthat/test-DBItest.R` where a call to `make_context()` can be found. The help for `make_context()` already contains an example that works for RSQLite. Adapt it to your needs. The `make_context()` function must be called before any tests can run. ```{r make-context, error = !rlang::is_installed("RSQLite")} library(DBItest) tweaks <- tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ) default_skip <- c("roundtrip_date", "roundtrip_timestamp") invisible(make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks, default_skip = default_skip )) ``` ### Testing Use `test_all()` to run all tests, and `test_some()` to run a specific test that failed previously. The `test_*` functions need to be run with a testthat reporter to avoid stopping at the first error or warning. For interactive use, the "progress" reporter gives good results. In the example below, the "location" and "stop" reporters are combined. Review `?testthat::Reporter` for a list of reporters. 
```{r simple, error = !rlang::is_installed("RSQLite")} DBItest::test_some("get_query_atomic") ``` DBItest relies heavily on metaprogramming. Unfortunately, this means that a failing test may give no indication of the reason for the failure. The `test_some()` function now by default integrates the new experimental [dblog package](https://github.com/r-dbi/dblog) package. It prints the DBI code that is executed as the tests are run, as seen above. Another way to scout for the reason of the problem is to review the sources of DBItest and relate the test name (that is printed with each failure) with the human-readable specification embedded with the test code. ```{r location, error = !rlang::is_installed("RSQLite")} testthat::with_reporter( c("location", "fail"), DBItest::test_some("get_query_atomic") ) ``` DBItest/inst/doc/DBItest.R0000644000176200001440000000223014725060474014721 0ustar liggesusers## ----setup, include = FALSE--------------------------------------------------- knitr::opts_chunk$set(error = (getRversion() < "3.5")) ## ----make-context, error = !rlang::is_installed("RSQLite")-------------------- library(DBItest) tweaks <- tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ) default_skip <- c("roundtrip_date", "roundtrip_timestamp") invisible(make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks, default_skip = default_skip )) ## ----simple, error = !rlang::is_installed("RSQLite")-------------------------- DBItest::test_some("get_query_atomic") ## ----location, error = !rlang::is_installed("RSQLite")------------------------ testthat::with_reporter( c("location", "fail"), 
DBItest::test_some("get_query_atomic") ) DBItest/inst/doc/DBItest.html0000644000176200001440000005505414725060475015501 0ustar liggesusers Testing DBI backends

Testing DBI backends

Kirill Müller

This document shows how to use the DBItest package when implementing a new DBI backend or when applying it to an existing backend. The DBItest package provides a large collection of automated tests.

Testing a new backend

The test cases in the DBItest package are structured very similarly to the sections in the “backend” vignette:

vignette("backend", package = "DBI")

Like the “backend†vignette, this vignette assumes that you are implementing the RKazam package that has a Kazam() function that creates a new DBIDriver instance for connecting to a “Kazam†database.

You can add the tests in the DBItest package incrementally, as you proceed with implementing the various parts of the DBI. The DBItest package builds upon the testthat package. To enable it, run the following in your package directory (after installing or updating devtools):

devtools::use_testthat()
devtools::use_test("DBItest")

This creates, among others, a file test-DBItest.R in the tests/testthat directory. Replace its entire contents by the following:

DBItest::make_context(Kazam(), NULL)
DBItest::test_getting_started()

Now test your package with devtools::test(). If you followed at least the “Getting started” section of the DBI “backend” vignette, all tests should succeed.

By adding the corresponding test function to your tests/test-DBItest.R file before implementing a section, you get immediate feedback which functionality of this section still needs to be implemented by running devtools::test() again. Therefore, proceed by appending the following to tests/test-DBItest.R, to include a test case for the forthcoming section:

DBItest::test_driver()

Again, all tests should succeed when you are done with the “Driver” section. Add the call to the next tester function, implement the following section until all tests succeed, and so forth.

In this scenario, you are usually interested only in the first error the test suite finds. The StopReporter of testthat is most helpful here, activate it by passing reporter = "stop" to devtools::test(). Alternatively, call the relevant DBItest::test_() function directly.

The tests are documented with the corresponding functions: For instance, ?test_driver shows a coarse description of all tests for the “Driver” test case. Test failures will include the name of the test that is failing; in this case, investigating the documentation or the source code of the DBItest package will usually lead to the cause of the error.

Not all tests can be satisfied: For example, there is one test that tests that logical variables survive a write-read roundtrip to the database, whereas another test tests that logical variables are converted to integer in such a case. Tests can be skipped by adding regular expressions for the tests to skip as character vector to the call, as in the following1:

DBItest::test_driver(skip = c(
  "data_type",          # Reason 1...
  "constructor.*",      # Reason 2...
  NULL
))

Some other reasons to skip tests are: - your database does not support a feature - you want to postpone or avoid the implementation of a feature - the test takes too long to run.

Testing an existing backend

For an existing backend, simply enabling all tests may be the quickest way to get started. Run the following in your package directory (after installing or updating devtools):

devtools::use_testthat()
devtools::use_test("DBItest")

This creates, among others, a file test-DBItest.R in the tests/testthat directory. Replace its entire contents by the following:

DBItest::make_context(Kazam(), NULL)
DBItest::test_all()

The notes about “Kazam†and skipping tests from the previous section apply here as well. The test_all() function simply calls all test cases.

External testing

DBItest is currently geared towards usage as part of a package’s test suite. With some effort it is possible to test a database backend against a custom database. This can help verify that your database installation gives expected results when accessed with DBI with specific connection arguments.

The example below shows how to run tests with the RSQLite backend.

Preparation

First, we need to define a test context. It contains:

  • a connector that describes how to establish the database connection, see ?DBI::`DBIConnector-class` for details,
  • tweaks, see ?tweaks,
  • tests skipped by default, as a character vector.

Database backends that use DBItest for testing usually have a file tests/testthat/helper-DBItest.R or tests/testthat/test-DBItest.R where a call to make_context() can be found. The help for make_context() already contains an example that works for RSQLite. Adapt it to your needs.

The make_context() function must be called before any tests can run.

library(DBItest)

tweaks <- tweaks(
  constructor_relax_args = TRUE,
  placeholder_pattern = c("?", "$1", "$name", ":name"),
  date_cast = function(x) paste0("'", x, "'"),
  time_cast = function(x) paste0("'", x, "'"),
  timestamp_cast = function(x) paste0("'", x, "'"),
  logical_return = function(x) as.integer(x),
  date_typed = FALSE,
  time_typed = FALSE,
  timestamp_typed = FALSE
)

default_skip <- c("roundtrip_date", "roundtrip_timestamp")

invisible(make_context(
  new(
    "DBIConnector",
    .drv = RSQLite::SQLite(),
    .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite"))
  ),
  tweaks = tweaks,
  default_skip = default_skip
))

Testing

Use test_all() to run all tests, and test_some() to run a specific test that failed previously. The test_* functions need to be run with a testthat reporter to avoid stopping at the first error or warning. For interactive use, the “progress” reporter gives good results. In the example below, the “location” and “fail” reporters are combined. Review ?testthat::Reporter for a list of reporters.

DBItest::test_some("get_query_atomic")
## Test passed

DBItest relies heavily on metaprogramming. Unfortunately, this means that a failing test may give no indication of the reason for the failure. The test_some() function now by default integrates the new experimental dblog package. It prints the DBI code that is executed as the tests are run, as seen above.

Another way to scout for the reason of the problem is to review the sources of DBItest and relate the test name (that is printed with each failure) with the human-readable specification embedded with the test code.

testthat::with_reporter(
  c("location", "fail"),
  DBItest::test_some("get_query_atomic")
)
## Start test: DBItest: Result: get_query_atomic
##    [success]
##    [success]
##    [success]
##    [success]
##    [success]
##    [success]
##    [success]
## End test: DBItest: Result: get_query_atomic

  1. The terminating NULL allows appending new lines to the end by copy-pasting an existing line, without having to take care of the terminating comma.↩︎

DBItest/inst/WORDLIST0000644000176200001440000000066014602017371013722 0ustar liggesusersAppVeyor CII CMD Codecov coercible config DateTimeClasses dbi DBIDriver DBIResult dblog debugme desc dev devtools difftime DML extensibility hadley hostname Kazam lintr lookahead lookaheads metaprogramming noLD NOTEs parametrized README Reenable RMariaDB RMySQL roundtrip Roundtrip roundtripping roxygen RPostgres RSQLite rstats sql subclasses subqueries subquery termnull testthat untyped vctrs vectorization vectorized vectorizes DBItest/README.md0000644000176200001440000000431114602020561013022 0ustar liggesusers# DBItest [![rcc](https://github.com/r-dbi/DBItest/workflows/rcc/badge.svg)](https://github.com/r-dbi/DBItest/actions) [![Codecov test coverage](https://codecov.io/gh/r-dbi/DBItest/branch/main/graph/badge.svg)](https://app.codecov.io/gh/r-dbi/DBItest?branch=main) [![CRAN_Status_Badge](https://www.r-pkg.org/badges/version/DBItest)](https://cran.r-project.org/package=DBItest) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3503/badge)](https://bestpractices.coreinfrastructure.org/projects/3503) This package is primarily useful for developers of [DBI](https://dbi.r-dbi.org) backends. It provides a considerable set of test cases for DBI backends. These test cases correspond to the [DBI specification](https://dbi.r-dbi.org/articles/spec). Please follow the steps below to add these test cases to your DBI backend. ## Installation Install from CRAN via ```r install.packages("DBItest") ``` or the development version using ```r devtools::install_github("r-dbi/DBItest") ``` ## Usage In your driver package, add `DBItest` to the `Suggests:` and enable the tests. 
Run the following code in you package's directory: ```r # install.packages("usethis") usethis::use_package("DBItest", "suggests") usethis::use_test("DBItest") ``` This enables testing using `testthat` (if necessary) and creates, among others, a file `test-DBItest.R` in the `tests/testthat` directory. Replace its entire contents by the following: ```r DBItest::make_context(Kazam(), NULL) DBItest::test_all() ``` This assumes that `Kazam()` returns an instance of your `DBIDriver` class. Additional arguments to `dbConnect()` are specified as named list instead of the `NULL` argument to `make_context()`. The `default_skip` argument to `make_context()` allows skipping tests that are not (yet) satisfied by your backend. Further reading: - Detailed instructions in `vignette("DBItest")` - The feature list in the [original proposal](https://github.com/r-dbi/DBItest/wiki/Proposal). --- Please note that the 'DBItest' project is released with a [Contributor Code of Conduct](https://dbitest.r-dbi.org/CODE_OF_CONDUCT.html). By contributing to this project, you agree to abide by its terms. DBItest/build/0000755000176200001440000000000014725060475012661 5ustar liggesusersDBItest/build/vignette.rds0000644000176200001440000000032014725060475015213 0ustar liggesusers‹‹àb```b`aab`b2™… 1# 'ævqò,I-.Ñ ÊMA“ Šgæ¥+•($%&g§æ¥£©áiÏ(ÉÍA“ã„ t†0X¢€nB(dÍKÌME7™Ý%µjá?ìúÿ£iáðN­,Ï/‚éAQÃUÃâ–™“ ³7$³Îapqƒ2ƒÐÝ€a>Šû9‹òËõ`~àzøèMÎI,F÷(WJbI¢^ZP?ÈÝ¡d'½¶DBItest/man/0000755000176200001440000000000014722017730012326 5ustar liggesusersDBItest/man/spec_driver_connect.Rd0000644000176200001440000000374414722017730016643 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-connect.R \docType{data} \name{spec_driver_connect} \alias{spec_driver_connect} \title{spec_driver_connect} \value{ \code{dbConnect()} returns an S4 object that inherits from \link[DBI:DBIConnection-class]{DBI::DBIConnection}. This object is used to communicate with the database engine. 
A \code{\link[=format]{format()}} method is defined for the connection object. It returns a string that consists of a single line of text. } \description{ spec_driver_connect } \section{Specification}{ DBI recommends using the following argument names for authentication parameters, with \code{NULL} default: \itemize{ \item \code{user} for the user name (default: current user) \item \code{password} for the password \item \code{host} for the host name (default: local connection) \item \code{port} for the port number (default: local connection) \item \code{dbname} for the name of the database on the host, or the database file name } The defaults should provide reasonable behavior, in particular a local connection for \code{host = NULL}. For some DBMS (e.g., PostgreSQL), this is different to a TCP/IP connection to \code{localhost}. In addition, DBI supports the \code{bigint} argument that governs how 64-bit integer data is returned. The following values are supported: \itemize{ \item \code{"integer"}: always return as \code{integer}, silently overflow \item \code{"numeric"}: always return as \code{numeric}, silently round \item \code{"character"}: always return the decimal representation as \code{character} \item \code{"integer64"}: return as a data type that can be coerced using \code{\link[=as.integer]{as.integer()}} (with warning on overflow), \code{\link[=as.numeric]{as.numeric()}} and \code{\link[=as.character]{as.character()}} } } \seealso{ Other driver specifications: \code{\link{spec_driver_constructor}}, \code{\link{spec_driver_data_type}}, \code{\link{spec_get_info}} } \concept{driver specifications} DBItest/man/spec_result_send_query.Rd0000644000176200001440000000751614722017730017414 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-send-query.R \docType{data} \name{spec_result_send_query} \alias{spec_result_send_query} \title{spec_result_send_query} \value{ \code{dbSendQuery()} returns an S4 object 
that inherits from \link[DBI:DBIResult-class]{DBI::DBIResult}. The result set can be used with \code{\link[DBI:dbFetch]{DBI::dbFetch()}} to extract records. Once you have finished using a result, make sure to clear it with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. } \description{ spec_result_send_query } \section{Failure modes}{ An error is raised when issuing a query over a closed or invalid connection, or if the query is not a non-\code{NA} string. An error is also raised if the syntax of the query is invalid and all query parameters are given (by passing the \code{params} argument) or the \code{immediate} argument is set to \code{TRUE}. } \section{Additional arguments}{ The following arguments are not part of the \code{dbSendQuery()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification}{ No warnings occur under normal conditions. When done, the DBIResult object must be cleared with a call to \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. Failure to clear the result set leads to a warning when the connection is closed. If the backend supports only one open result set per connection, issuing a second query invalidates an already open result set and raises a warning. The newly opened result set is valid and must be cleared with \code{dbClearResult()}. The \code{param} argument allows passing query parameters, see \code{\link[DBI:dbBind]{DBI::dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). 
The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[DBI:dbBind]{DBI::dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/test_arrow.Rd0000644000176200001440000000236714602020561015007 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-arrow.R \name{test_arrow} \alias{test_arrow} \title{Test Arrow methods} \usage{ test_arrow(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ 
\item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test Arrow methods } \seealso{ Other tests: \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_result_send_statement.Rd0000644000176200001440000000764614722017730020257 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-send-statement.R \docType{data} \name{spec_result_send_statement} \alias{spec_result_send_statement} \title{spec_result_send_statement} \value{ \code{dbSendStatement()} returns an S4 object that inherits from \link[DBI:DBIResult-class]{DBI::DBIResult}. The result set can be used with \code{\link[DBI:dbGetRowsAffected]{DBI::dbGetRowsAffected()}} to determine the number of rows affected by the query. Once you have finished using a result, make sure to clear it with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. } \description{ spec_result_send_statement } \section{Failure modes}{ An error is raised when issuing a statement over a closed or invalid connection, or if the statement is not a non-\code{NA} string. 
An error is also raised if the syntax of the query is invalid and all query parameters are given (by passing the \code{params} argument) or the \code{immediate} argument is set to \code{TRUE}. } \section{Specification}{ No warnings occur under normal conditions. When done, the DBIResult object must be cleared with a call to \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. Failure to clear the result set leads to a warning when the connection is closed. If the backend supports only one open result set per connection, issuing a second query invalidates an already open result set and raises a warning. The newly opened result set is valid and must be cleared with \code{dbClearResult()}. The \code{param} argument allows passing query parameters, see \code{\link[DBI:dbBind]{DBI::dbBind()}} for details. } \section{Additional arguments}{ The following arguments are not part of the \code{dbSendStatement()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. 
Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[DBI:dbBind]{DBI::dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}} } \concept{result specifications} DBItest/man/spec_arrow_send_query_arrow.Rd0000644000176200001440000000766714722017730020451 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-send-query-arrow.R \docType{data} \name{spec_arrow_send_query_arrow} \alias{spec_arrow_send_query_arrow} \title{spec_result_send_query} \value{ \code{dbSendQueryArrow()} returns an S4 object that inherits from \link[DBI:DBIResultArrow-class]{DBI::DBIResultArrow}. The result set can be used with \code{\link[DBI:dbFetchArrow]{DBI::dbFetchArrow()}} to extract records. 
Once you have finished using a result, make sure to clear it with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. } \description{ spec_result_send_query } \section{Failure modes}{ An error is raised when issuing a query over a closed or invalid connection, or if the query is not a non-\code{NA} string. An error is also raised if the syntax of the query is invalid and all query parameters are given (by passing the \code{params} argument) or the \code{immediate} argument is set to \code{TRUE}. } \section{Additional arguments}{ The following arguments are not part of the \code{dbSendQueryArrow()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification}{ No warnings occur under normal conditions. When done, the DBIResult object must be cleared with a call to \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. Failure to clear the result set leads to a warning when the connection is closed. If the backend supports only one open result set per connection, issuing a second query invalidates an already open result set and raises a warning. The newly opened result set is valid and must be cleared with \code{dbClearResult()}. The \code{param} argument allows passing query parameters, see \code{\link[DBI:dbBind]{DBI::dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). 
The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[DBI:dbBind]{DBI::dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/spec_arrow_append_table_arrow.Rd0000644000176200001440000000662714722017730020704 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-append-table-arrow.R \docType{data} \name{spec_arrow_append_table_arrow} 
\alias{spec_arrow_append_table_arrow} \title{spec_arrow_append_table_arrow} \value{ \code{dbAppendTableArrow()} returns a scalar numeric. } \description{ spec_arrow_append_table_arrow } \section{Failure modes}{ If the table does not exist, or the new data in \code{values} is not a data frame or has different column names, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. } \section{Specification}{ SQL keywords can be used freely in table names, column names, and data. Quotes, commas, spaces, and other special characters such as newlines and tabs, can also be used in the data, and, if the database supports non-syntactic identifiers, also for table names and column names. The following data types must be supported at least, and be read identically with \code{\link[DBI:dbReadTable]{DBI::dbReadTable()}}: \itemize{ \item integer \item numeric (the behavior for \code{Inf} and \code{NaN} is not specified) \item logical \item \code{NA} as NULL \item 64-bit values (using \code{"bigint"} as field type); the result can be \itemize{ \item converted to a numeric, which may lose precision, \item converted a character vector, which gives the full decimal representation \item written to another table and read again unchanged } \item character (in both UTF-8 and native encodings), supporting empty strings (before and after non-empty strings) \item factor (possibly returned as character) \item objects of type \link[blob:blob]{blob::blob} (if supported by the database) \item date (if supported by the database; returned as \code{Date}) also for dates prior to 1970 or 1900 or after 2038 \item time (if supported by the database; returned as objects that inherit from \code{difftime}) \item timestamp (if supported by the database; returned as 
\code{POSIXct} respecting the time zone but not necessarily preserving the input time zone), also for timestamps prior to 1970 or 1900 or after 2038 respecting the time zone but not necessarily preserving the input time zone) } Mixing column types in the same table is supported. The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbAppendTableArrow()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done to support databases that allow non-syntactic names for their objects: } The \code{value} argument must be a data frame with a subset of the columns of the existing table. The order of the columns does not matter. } \seealso{ Other Arrow specifications: \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/spec_meta_has_completed.Rd0000644000176200001440000000324414722017730017447 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-has-completed.R \docType{data} \name{spec_meta_has_completed} \alias{spec_meta_has_completed} \title{spec_meta_has_completed} \value{ \code{dbHasCompleted()} returns a logical scalar. For a query initiated by \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}} with non-empty result set, \code{dbHasCompleted()} returns \code{FALSE} initially and \code{TRUE} after calling \code{\link[DBI:dbFetch]{DBI::dbFetch()}} without limit. 
For a query initiated by \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}}, \code{dbHasCompleted()} always returns \code{TRUE}. } \description{ spec_meta_has_completed } \section{Failure modes}{ Attempting to query completion status for a result set cleared with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} gives an error. } \section{Specification}{ The completion status for a query is only guaranteed to be set to \code{FALSE} after attempting to fetch past the end of the entire result. Therefore, for a query with an empty result set, the initial return value is unspecified, but the result value is \code{TRUE} after trying to fetch only one row. Similarly, for a query with a result set of length n, the return value is unspecified after fetching n rows, but the result value is \code{TRUE} after trying to fetch only one more row. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_sql_quote_identifier.Rd0000644000176200001440000000454414722017730020054 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-quote-identifier.R \docType{data} \name{spec_sql_quote_identifier} \alias{spec_sql_quote_identifier} \title{spec_sql_quote_identifier} \value{ \code{dbQuoteIdentifier()} returns an object that can be coerced to \link{character}, of the same length as the input. For an empty character vector this function returns a length-0 object. The names of the input argument are preserved in the output. When passing the returned object again to \code{dbQuoteIdentifier()} as \code{x} argument, it is returned unchanged. Passing objects of class \link[DBI:SQL]{DBI::SQL} should also return them unchanged. 
(For backends it may be most convenient to return \link[DBI:SQL]{DBI::SQL} objects to achieve this behavior, but this is not required.) } \description{ spec_sql_quote_identifier } \section{Failure modes}{ An error is raised if the input contains \code{NA}, but not for an empty string. } \section{Specification}{ Calling \code{\link[DBI:dbGetQuery]{DBI::dbGetQuery()}} for a query of the format \verb{SELECT 1 AS ...} returns a data frame with the identifier, unquoted, as column name. Quoted identifiers can be used as table and column names in SQL queries, in particular in queries like \verb{SELECT 1 AS ...} and \verb{SELECT * FROM (SELECT 1) ...}. The method must use a quoting mechanism that is unambiguously different from the quoting mechanism used for strings, so that a query like \verb{SELECT ... FROM (SELECT 1 AS ...)} throws an error if the column names do not match. The method can quote column names that contain special characters such as a space, a dot, a comma, or quotes used to mark strings or identifiers, if the database supports this. In any case, checking the validity of the identifier should be performed only when executing a query, and not by \code{dbQuoteIdentifier()}. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_connection.Rd0000644000176200001440000000243014602020561016003 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-connection.R \name{test_connection} \alias{test_connection} \title{Test the "Connection" class} \usage{ test_connection(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test the "Connection" class } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_sql_write_table.Rd0000644000176200001440000001401114722017730017004 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-write-table.R \docType{data} \name{spec_sql_write_table} \alias{spec_sql_write_table} \title{spec_sql_write_table} \value{ \code{dbWriteTable()} returns \code{TRUE}, invisibly. } \description{ spec_sql_write_table } \section{Failure modes}{ If the table exists, and both \code{append} and \code{overwrite} arguments are unset, or \code{append = TRUE} and the data frame with the new data has different column names, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the additional arguments \code{row.names}, \code{overwrite}, \code{append}, \code{field.types}, and \code{temporary} (non-scalars, unsupported data types, \code{NA}, incompatible values, duplicate or missing names, incompatible columns) also raise an error. 
} \section{Additional arguments}{ The following arguments are not part of the \code{dbWriteTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{row.names} (default: \code{FALSE}) \item \code{overwrite} (default: \code{FALSE}) \item \code{append} (default: \code{FALSE}) \item \code{field.types} (default: \code{NULL}) \item \code{temporary} (default: \code{FALSE}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbWriteTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } The \code{value} argument must be a data frame with a subset of the columns of the existing table if \code{append = TRUE}. The order of the columns does not matter with \code{append = TRUE}. If the \code{overwrite} argument is \code{TRUE}, an existing table of the same name will be overwritten. This argument doesn't change behavior if the table does not exist yet. If the \code{append} argument is \code{TRUE}, the rows in an existing table are preserved, and the new data are appended. If the table doesn't exist yet, it is created. If the \code{temporary} argument is \code{TRUE}, the table is not available in a second connection and is gone after reconnecting. Not all backends support this argument. A regular, non-temporary table is visible in a second connection, in a pre-existing connection, and after reconnecting to the database. SQL keywords can be used freely in table names, column names, and data. 
Quotes, commas, spaces, and other special characters such as newlines and tabs, can also be used in the data, and, if the database supports non-syntactic identifiers, also for table names and column names. The following data types must be supported at least, and be read identically with \code{\link[DBI:dbReadTable]{DBI::dbReadTable()}}: \itemize{ \item integer \item numeric (the behavior for \code{Inf} and \code{NaN} is not specified) \item logical \item \code{NA} as NULL \item 64-bit values (using \code{"bigint"} as field type); the result can be \itemize{ \item converted to a numeric, which may lose precision, \item converted a character vector, which gives the full decimal representation \item written to another table and read again unchanged } \item character (in both UTF-8 and native encodings), supporting empty strings before and after a non-empty string \item factor (returned as character) \item list of raw (if supported by the database) \item objects of type \link[blob:blob]{blob::blob} (if supported by the database) \item date (if supported by the database; returned as \code{Date}), also for dates prior to 1970 or 1900 or after 2038 \item time (if supported by the database; returned as objects that inherit from \code{difftime}) \item timestamp (if supported by the database; returned as \code{POSIXct} respecting the time zone but not necessarily preserving the input time zone), also for timestamps prior to 1970 or 1900 or after 2038 respecting the time zone but not necessarily preserving the input time zone) } Mixing column types in the same table is supported. The \code{field.types} argument must be a named character vector with at most one entry for each column. It indicates the SQL data type to be used for a new column. If a column is missed from \code{field.types}, the type is inferred from the input data with \code{\link[DBI:dbDataType]{DBI::dbDataType()}}. 
The interpretation of \link{rownames} depends on the \code{row.names} argument, see \code{\link[DBI:rownames]{DBI::sqlRownamesToColumn()}} for details: \itemize{ \item If \code{FALSE} or \code{NULL}, row names are ignored. \item If \code{TRUE}, row names are converted to a column named "row_names", even if the input data frame only has natural row names from 1 to \code{nrow(...)}. \item If \code{NA}, a column named "row_names" is created if the data has custom row names, no extra column is created in the case of natural row names. \item If a string, this specifies the name of the column in the remote table that contains the row names, even if the input data frame only has natural row names. } The default is \code{row.names = FALSE}. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}} } \concept{sql specifications} DBItest/man/spec_arrow_read_table_arrow.Rd0000644000176200001440000000331214722017730020334 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-read-table-arrow.R \docType{data} \name{spec_arrow_read_table_arrow} \alias{spec_arrow_read_table_arrow} \title{spec_arrow_read_table_arrow} \value{ \code{dbReadTableArrow()} returns an Arrow object that contains the complete data from the remote table, effectively the result of calling \code{\link[DBI:dbGetQueryArrow]{DBI::dbGetQueryArrow()}} with \verb{SELECT * FROM }. An empty table is returned as an Arrow object with zero rows. 
} \description{ spec_arrow_read_table_arrow } \section{Failure modes}{ An error is raised if the table does not exist. An error is raised when calling this method for a closed or invalid connection. An error is raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbReadTableArrow()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } } \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/spec_meta_get_statement.Rd0000644000176200001440000000163614722017730017506 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-get-statement.R \docType{data} \name{spec_meta_get_statement} \alias{spec_meta_get_statement} \title{spec_meta_get_statement} \value{ \code{dbGetStatement()} returns a string, the query used in either \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}} or \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}}. } \description{ spec_meta_get_statement } \section{Failure modes}{ Attempting to query the statement for a result set cleared with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} gives an error. 
} \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_meta_is_valid.Rd0000644000176200001440000000336614722017730016437 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-is-valid.R \docType{data} \name{spec_meta_is_valid} \alias{spec_meta_is_valid} \title{spec_meta_is_valid} \value{ \code{dbIsValid()} returns a logical scalar, \code{TRUE} if the object specified by \code{dbObj} is valid, \code{FALSE} otherwise. A \link[DBI:DBIConnection-class]{DBI::DBIConnection} object is initially valid, and becomes invalid after disconnecting with \code{\link[DBI:dbDisconnect]{DBI::dbDisconnect()}}. For an invalid connection object (e.g., for some drivers if the object is saved to a file and then restored), the method also returns \code{FALSE}. A \link[DBI:DBIResult-class]{DBI::DBIResult} object is valid after a call to \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}}, and stays valid even after all rows have been fetched; only clearing it with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} invalidates it. A \link[DBI:DBIResult-class]{DBI::DBIResult} object is also valid after a call to \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}}, and stays valid after querying the number of rows affected; only clearing it with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} invalidates it. If the connection to the database system is dropped (e.g., due to connectivity problems, server failure, etc.), \code{dbIsValid()} should return \code{FALSE}. This is not tested automatically. 
} \description{ spec_meta_is_valid } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}} } \concept{meta specifications} DBItest/man/spec_arrow_fetch_arrow.Rd0000644000176200001440000000260514602020561017340 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-fetch-arrow.R \docType{data} \name{spec_arrow_fetch_arrow} \alias{spec_arrow_fetch_arrow} \title{spec_arrow_fetch_arrow} \value{ \code{dbFetchArrow()} always returns an object coercible to a \link{data.frame} with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. } \description{ spec_arrow_fetch_arrow } \section{Failure modes}{ An attempt to fetch from a closed result set raises an error. } \section{Specification}{ Fetching multi-row queries with one or more columns by default returns the entire result. The object returned by \code{dbFetchArrow()} can also be passed to \code{\link[nanoarrow:as_nanoarrow_array_stream]{nanoarrow::as_nanoarrow_array_stream()}} to create a nanoarrow array stream object that can be used to read the result set in batches. The chunk size is implementation-specific. 
} \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/spec_sql_list_objects.Rd0000644000176200001440000000541714722017730017201 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-list-objects.R \docType{data} \name{spec_sql_list_objects} \alias{spec_sql_list_objects} \title{spec_sql_list_objects} \value{ \code{dbListObjects()} returns a data frame with columns \code{table} and \code{is_prefix} (in that order), optionally with other columns with a dot (\code{.}) prefix. The \code{table} column is of type list. Each object in this list is suitable for use as argument in \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}. The \code{is_prefix} column is a logical. This data frame contains one row for each object (schema, table and view) accessible from the prefix (if passed) or from the global namespace (if prefix is omitted). Tables added with \code{\link[DBI:dbWriteTable]{DBI::dbWriteTable()}} are part of the data frame. As soon a table is removed from the database, it is also removed from the data frame of database objects. The same applies to temporary objects if supported by the database. The returned names are suitable for quoting with \code{dbQuoteIdentifier()}. } \description{ spec_sql_list_objects } \section{Failure modes}{ An error is raised when calling this method for a closed or invalid connection. } \section{Specification}{ The \code{prefix} column indicates if the \code{table} value refers to a table or a prefix. 
For a call with the default \code{prefix = NULL}, the \code{table} values that have \code{is_prefix == FALSE} correspond to the tables returned from \code{\link[DBI:dbListTables]{DBI::dbListTables()}}. The \code{table} object can be quoted with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}. The result of quoting can be passed to \code{\link[DBI:dbUnquoteIdentifier]{DBI::dbUnquoteIdentifier()}}. (For backends it may be convenient to use the \link[DBI:Id]{DBI::Id} class, but this is not required.) Values in \code{table} column that have \code{is_prefix == TRUE} can be passed as the \code{prefix} argument to another call to \code{dbListObjects()}. For the data frame returned from a \code{dbListObjects()} call with the \code{prefix} argument set, all \code{table} values where \code{is_prefix} is \code{FALSE} can be used in a call to \code{\link[DBI:dbExistsTable]{DBI::dbExistsTable()}} which returns \code{TRUE}. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_meta.Rd0000644000176200001440000000237614602020561014603 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-meta.R \name{test_meta} \alias{test_meta} \title{Test metadata functions} \usage{ test_meta(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. 
The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test metadata functions } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_meta_get_rows_affected.Rd0000644000176200001440000000252614722017730020314 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-get-rows-affected.R \docType{data} \name{spec_meta_get_rows_affected} \alias{spec_meta_get_rows_affected} \title{spec_meta_get_rows_affected} \value{ \code{dbGetRowsAffected()} returns a scalar number (integer or numeric), the number of rows affected by a data manipulation statement issued with \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}}. The value is available directly after the call and does not change after calling \code{\link[DBI:dbFetch]{DBI::dbFetch()}}. \code{NA_integer_} or \code{NA_numeric_} are allowed if the number of rows affected is not known. For queries issued with \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}}, zero is returned before and after the call to \code{dbFetch()}. \code{NA} values are not allowed. 
} \description{ spec_meta_get_rows_affected } \section{Failure modes}{ Attempting to get the rows affected for a result set cleared with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} gives an error. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_sql_exists_table.Rd0000644000176200001440000000342014722017730017173 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-exists-table.R \docType{data} \name{spec_sql_exists_table} \alias{spec_sql_exists_table} \title{spec_sql_exists_table} \value{ \code{dbExistsTable()} returns a logical scalar, \code{TRUE} if the table or view specified by the \code{name} argument exists, \code{FALSE} otherwise. This includes temporary tables if supported by the database. } \description{ spec_sql_exists_table } \section{Failure modes}{ An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbExistsTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } For all tables listed by \code{\link[DBI:dbListTables]{DBI::dbListTables()}}, \code{dbExistsTable()} returns \code{TRUE}. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_quote_string.Rd0000644000176200001440000000421014722017730017226 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-quote-string.R \docType{data} \name{spec_sql_quote_string} \alias{spec_sql_quote_string} \title{spec_sql_quote_string} \value{ \code{dbQuoteString()} returns an object that can be coerced to \link{character}, of the same length as the input. For an empty character vector this function returns a length-0 object. When passing the returned object again to \code{dbQuoteString()} as \code{x} argument, it is returned unchanged. Passing objects of class \link[DBI:SQL]{DBI::SQL} should also return them unchanged. (For backends it may be most convenient to return \link[DBI:SQL]{DBI::SQL} objects to achieve this behavior, but this is not required.) } \description{ spec_sql_quote_string } \section{Specification}{ The returned expression can be used in a \verb{SELECT ...} query, and for any scalar character \code{x} the value of \code{dbGetQuery(paste0("SELECT ", dbQuoteString(x)))[[1]]} must be identical to \code{x}, even if \code{x} contains spaces, tabs, quotes (single or double), backticks, or newlines (in any combination) or is itself the result of a \code{dbQuoteString()} call coerced back to character (even repeatedly). If \code{x} is \code{NA}, the result must merely satisfy \code{\link[=is.na]{is.na()}}. The strings \code{"NA"} or \code{"NULL"} are not treated specially. 
\code{NA} should be translated to an unquoted SQL \code{NULL}, so that the query \verb{SELECT * FROM (SELECT 1) a WHERE ... IS NULL} returns one row. } \section{Failure modes}{ Passing a numeric, integer, logical, or raw vector, or a list for the \code{x} argument raises an error. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_append_table.Rd0000644000176200001440000000757714722017730017144 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-append-table.R \docType{data} \name{spec_sql_append_table} \alias{spec_sql_append_table} \title{spec_sql_append_table} \value{ \code{dbAppendTable()} returns a scalar numeric. } \description{ spec_sql_append_table } \section{Failure modes}{ If the table does not exist, or the new data in \code{values} is not a data frame or has different column names, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the \code{row.names} argument (non-scalars, unsupported data types, \code{NA}) also raise an error. Passing a \code{value} argument different to \code{NULL} to the \code{row.names} argument (in particular \code{TRUE}, \code{NA}, and a string) raises an error. 
} \section{Specification}{ SQL keywords can be used freely in table names, column names, and data. Quotes, commas, spaces, and other special characters such as newlines and tabs, can also be used in the data, and, if the database supports non-syntactic identifiers, also for table names and column names. The following data types must be supported at least, and be read identically with \code{\link[DBI:dbReadTable]{DBI::dbReadTable()}}: \itemize{ \item integer \item numeric (the behavior for \code{Inf} and \code{NaN} is not specified) \item logical \item \code{NA} as NULL \item 64-bit values (using \code{"bigint"} as field type); the result can be \itemize{ \item converted to a numeric, which may lose precision, \item converted to a character vector, which gives the full decimal representation \item written to another table and read again unchanged } \item character (in both UTF-8 and native encodings), supporting empty strings (before and after non-empty strings) \item factor (returned as character, with a warning) \item list of raw (if supported by the database) \item objects of type \link[blob:blob]{blob::blob} (if supported by the database) \item date (if supported by the database; returned as \code{Date}) also for dates prior to 1970 or 1900 or after 2038 \item time (if supported by the database; returned as objects that inherit from \code{difftime}) \item timestamp (if supported by the database; returned as \code{POSIXct} respecting the time zone but not necessarily preserving the input time zone), also for timestamps prior to 1970 or 1900 or after 2038 } Mixing column types in the same table is supported. 
The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbAppendTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } The \code{row.names} argument must be \code{NULL}, the default value. Row names are ignored. The \code{value} argument must be a data frame with a subset of the columns of the existing table. The order of the columns does not matter. } \seealso{ Other sql specifications: \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_arrow_write_table_arrow.Rd0000644000176200001440000001135714722017730020563 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-write-table-arrow.R \docType{data} \name{spec_arrow_write_table_arrow} \alias{spec_arrow_write_table_arrow} \title{spec_arrow_write_table_arrow} \value{ \code{dbWriteTableArrow()} returns \code{TRUE}, invisibly. } \description{ spec_arrow_write_table_arrow } \section{Failure modes}{ If the table exists, and both \code{append} and \code{overwrite} arguments are unset, or \code{append = TRUE} and the data frame with the new data has different column names, an error is raised; the remote table remains unchanged. 
An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the additional arguments \code{overwrite}, \code{append}, and \code{temporary} (non-scalars, unsupported data types, \code{NA}, incompatible values, incompatible columns) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbWriteTableArrow()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{overwrite} (default: \code{FALSE}) \item \code{append} (default: \code{FALSE}) \item \code{temporary} (default: \code{FALSE}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbWriteTableArrow()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } The \code{value} argument must be a data frame with a subset of the columns of the existing table if \code{append = TRUE}. The order of the columns does not matter with \code{append = TRUE}. If the \code{overwrite} argument is \code{TRUE}, an existing table of the same name will be overwritten. This argument doesn't change behavior if the table does not exist yet. If the \code{append} argument is \code{TRUE}, the rows in an existing table are preserved, and the new data are appended. If the table doesn't exist yet, it is created. 
If the \code{temporary} argument is \code{TRUE}, the table is not available in a second connection and is gone after reconnecting. Not all backends support this argument. A regular, non-temporary table is visible in a second connection, in a pre-existing connection, and after reconnecting to the database. SQL keywords can be used freely in table names, column names, and data. Quotes, commas, spaces, and other special characters such as newlines and tabs, can also be used in the data, and, if the database supports non-syntactic identifiers, also for table names and column names. The following data types must be supported at least, and be read identically with \code{\link[DBI:dbReadTable]{DBI::dbReadTable()}}: \itemize{ \item integer \item numeric (the behavior for \code{Inf} and \code{NaN} is not specified) \item logical \item \code{NA} as NULL \item 64-bit values (using \code{"bigint"} as field type); the result can be \itemize{ \item converted to a numeric, which may lose precision, \item converted a character vector, which gives the full decimal representation \item written to another table and read again unchanged } \item character (in both UTF-8 and native encodings), supporting empty strings before and after a non-empty string \item factor (possibly returned as character) \item objects of type \link[blob:blob]{blob::blob} (if supported by the database) \item date (if supported by the database; returned as \code{Date}), also for dates prior to 1970 or 1900 or after 2038 \item time (if supported by the database; returned as objects that inherit from \code{difftime}) \item timestamp (if supported by the database; returned as \code{POSIXct} respecting the time zone but not necessarily preserving the input time zone), also for timestamps prior to 1970 or 1900 or after 2038 respecting the time zone but not necessarily preserving the input time zone) } Mixing column types in the same table is supported. 
} \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/test_driver.Rd0000644000176200001440000000240414602020561015140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-driver.R \name{test_driver} \alias{test_driver} \title{Test the "Driver" class} \usage{ test_driver(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test the "Driver" class } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_driver_data_type.Rd0000644000176200001440000000330214722017730017152 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-data-type.R \docType{data} \name{spec_driver_data_type} \alias{spec_driver_data_type} \title{spec_driver_data_type} \value{ \code{dbDataType()} returns the SQL type that corresponds to the \code{obj} argument as a non-empty character string. For data frames, a character vector with one element per column is returned. } \description{ spec_driver_data_type } \section{Failure modes}{ An error is raised for invalid values for the \code{obj} argument such as a \code{NULL} value. } \section{Specification}{ The backend can override the \code{\link[DBI:dbDataType]{DBI::dbDataType()}} generic for its driver class. This generic expects an arbitrary object as second argument. To query the values returned by the default implementation, run \code{example(dbDataType, package = "DBI")}. If the backend needs to override this generic, it must accept all basic R data types as its second argument, namely \link{logical}, \link{integer}, \link{numeric}, \link{character}, dates (see \link{Dates}), date-time (see \link{DateTimeClasses}), and \link{difftime}. If the database supports blobs, this method also must accept lists of \link{raw} vectors, and \link[blob:blob]{blob::blob} objects. 
As-is objects (i.e., wrapped by \code{\link[=I]{I()}}) must be supported and return the same results as their unwrapped counterparts. The SQL data type for \link{factor} and \link{ordered} is the same as for character. The behavior for other object types is not specified. } \seealso{ Other driver specifications: \code{\link{spec_driver_connect}}, \code{\link{spec_driver_constructor}}, \code{\link{spec_get_info}} } \concept{driver specifications} DBItest/man/spec_sql_list_fields.Rd0000644000176200001440000000326314722017730017013 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-list-fields.R \docType{data} \name{spec_sql_list_fields} \alias{spec_sql_list_fields} \title{spec_sql_list_fields} \value{ \code{dbListFields()} returns a character vector that enumerates all fields in the table in the correct order. This also works for temporary tables if supported by the database. The returned names are suitable for quoting with \code{dbQuoteIdentifier()}. } \description{ spec_sql_list_fields } \section{Failure modes}{ If the table does not exist, an error is raised. Invalid types for the \code{name} argument (e.g., \code{character} of length not equal to one, or numeric) lead to an error. An error is also raised when calling this method for a closed or invalid connection. } \section{Specification}{ The \code{name} argument can be \itemize{ \item a string \item the return value of \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} \item a value from the \code{table} column from the return value of \code{\link[DBI:dbListObjects]{DBI::dbListObjects()}} where \code{is_prefix} is \code{FALSE} } A column named \code{row_names} is treated like any other column. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_transaction.Rd0000644000176200001440000000243114602020561016172 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-transaction.R \name{test_transaction} \alias{test_transaction} \title{Test transaction functions} \usage{ test_transaction(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test transaction functions } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()} } \concept{tests} DBItest/man/spec_result_clear_result.Rd0000644000176200001440000000323414722017730017713 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-clear-result.R \docType{data} \name{spec_result_clear_result} \alias{spec_result_clear_result} \title{spec_result_clear_result} \value{ \code{dbClearResult()} returns \code{TRUE}, invisibly, for result sets obtained from \code{dbSendQuery()}, \code{dbSendStatement()}, or \code{dbSendQueryArrow()}. } \description{ spec_result_clear_result } \section{Failure modes}{ An attempt to close an already closed result set issues a warning for \code{dbSendQuery()}, \code{dbSendStatement()}, and \code{dbSendQueryArrow()}. } \section{Specification}{ \code{dbClearResult()} frees all resources associated with retrieving the result of a query or update operation. The DBI backend can expect a call to \code{dbClearResult()} for each \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}} or \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}} call. 
} \seealso{ Other result specifications: \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}} } \concept{Arrow specifications} \concept{result specifications} DBItest/man/spec_connection_disconnect.Rd0000644000176200001440000000136614602214416020202 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-connection-disconnect.R \docType{data} \name{spec_connection_disconnect} \alias{spec_connection_disconnect} \title{spec_connection_disconnect} \value{ \code{dbDisconnect()} returns \code{TRUE}, invisibly. } \description{ spec_connection_disconnect } \section{Failure modes}{ A warning is issued on garbage collection when a connection has been released without calling \code{dbDisconnect()}, but this cannot be tested automatically. At least one warning is issued immediately when calling \code{dbDisconnect()} on an already disconnected or invalid connection. 
} \seealso{ Other connection specifications: \code{\link{spec_get_info}} } \concept{connection specifications} DBItest/man/spec_getting_started.Rd0000644000176200001440000000110514602017371017011 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-getting-started.R \docType{data} \name{spec_getting_started} \alias{spec_getting_started} \title{spec_getting_started} \description{ spec_getting_started } \section{Definition}{ A DBI backend is an R package which imports the \pkg{DBI} and \pkg{methods} packages. For better or worse, the names of many existing backends start with \sQuote{R}, e.g., \pkg{RSQLite}, \pkg{RMySQL}, \pkg{RSQLServer}; it is up to the backend author to adopt this convention or not. } \concept{getting specifications} DBItest/man/spec_result_execute.Rd0000644000176200001440000000600114722017730016664 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-execute.R \docType{data} \name{spec_result_execute} \alias{spec_result_execute} \title{spec_result_execute} \value{ \code{dbExecute()} always returns a scalar numeric that specifies the number of rows affected by the statement. } \description{ spec_result_execute } \section{Failure modes}{ An error is raised when issuing a statement over a closed or invalid connection, if the syntax of the statement is invalid, or if the statement is not a non-\code{NA} string. } \section{Additional arguments}{ The following arguments are not part of the \code{dbExecute()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" sections for details on their usage. } \section{Specification}{ The \code{param} argument allows passing query parameters, see \code{\link[DBI:dbBind]{DBI::dbBind()}} for details. 
} \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[DBI:dbBind]{DBI::dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} 
DBItest/man/spec_sql_create_table.Rd0000644000176200001440000000562414722017730017127 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-create-table.R \docType{data} \name{spec_sql_create_table} \alias{spec_sql_create_table} \title{spec_sql_create_table} \value{ \code{dbCreateTable()} returns \code{TRUE}, invisibly. } \description{ spec_sql_create_table } \section{Failure modes}{ If the table exists, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the \code{row.names} and \code{temporary} arguments (non-scalars, unsupported data types, \code{NA}, incompatible values, duplicate names) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbCreateTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{temporary} (default: \code{FALSE}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbCreateTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } The \code{value} argument can be: \itemize{ \item a data frame, \item a named list of SQL types } If the \code{temporary} argument is \code{TRUE}, the table is not available in a second connection and is gone after reconnecting. Not all backends support this argument. 
A regular, non-temporary table is visible in a second connection, in a pre-existing connection, and after reconnecting to the database. SQL keywords can be used freely in table names, column names, and data. Quotes, commas, and spaces can also be used for table names and column names, if the database supports non-syntactic identifiers. The \code{row.names} argument must be missing or \code{NULL}, the default value. All other values for the \code{row.names} argument (in particular \code{TRUE}, \code{NA}, and a string) raise an error. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_read_table.Rd0000644000176200001440000000646414722017730016602 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-read-table.R \docType{data} \name{spec_sql_read_table} \alias{spec_sql_read_table} \title{spec_sql_read_table} \value{ \code{dbReadTable()} returns a data frame that contains the complete data from the remote table, effectively the result of calling \code{\link[DBI:dbGetQuery]{DBI::dbGetQuery()}} with \verb{SELECT * FROM }. An empty table is returned as a data frame with zero rows. The presence of \link{rownames} depends on the \code{row.names} argument, see \code{\link[DBI:rownames]{DBI::sqlColumnToRownames()}} for details: \itemize{ \item If \code{FALSE} or \code{NULL}, the returned data frame doesn't have row names. \item If \code{TRUE}, a column named "row_names" is converted to row names. 
} \itemize{ \item If \code{NA}, a column named "row_names" is converted to row names if it exists, otherwise no translation occurs. \item If a string, this specifies the name of the column in the remote table that contains the row names. } The default is \code{row.names = FALSE}. If the database supports identifiers with special characters, the columns in the returned data frame are converted to valid R identifiers if the \code{check.names} argument is \code{TRUE}. If \code{check.names = FALSE}, the returned table has non-syntactic column names without quotes. } \description{ spec_sql_read_table } \section{Failure modes}{ An error is raised if the table does not exist. An error is raised if \code{row.names} is \code{TRUE} and no "row_names" column exists. An error is raised if \code{row.names} is set to a string and no corresponding column exists. An error is raised when calling this method for a closed or invalid connection. An error is raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. Unsupported values for \code{row.names} and \code{check.names} (non-scalars, unsupported data types, \code{NA} for \code{check.names}) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbReadTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{row.names} (default: \code{FALSE}) \item \code{check.names} } They must be provided as named arguments. See the "Value" section for details on their usage. 
} \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbReadTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_compliance.Rd0000644000176200001440000000243014602020561015756 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-compliance.R \name{test_compliance} \alias{test_compliance} \title{Test full compliance to DBI} \usage{ test_compliance(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test full compliance to DBI } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_result_create_table_with_data_type.Rd0000644000176200001440000000147014602017371022724 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-create-table-with-data-type.R \docType{data} \name{spec_result_create_table_with_data_type} \alias{spec_result_create_table_with_data_type} \title{spec_result_create_table_with_data_type} \description{ spec_result_create_table_with_data_type } \section{Specification}{ All data types returned by \code{dbDataType()} are usable in an SQL statement of the form \code{"CREATE TABLE test (a ...)"}. 
} \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_result_get_query.Rd0000644000176200001440000000764414722017730017244 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-get-query.R \docType{data} \name{spec_result_get_query} \alias{spec_result_get_query} \title{spec_result_get_query} \value{ \code{dbGetQuery()} always returns a \link{data.frame}, with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. } \description{ spec_result_get_query } \section{Failure modes}{ An error is raised when issuing a query over a closed or invalid connection, if the syntax of the query is invalid, or if the query is not a non-\code{NA} string. If the \code{n} argument is not an atomic whole number greater or equal to -1 or Inf, an error is raised, but a subsequent call to \code{dbGetQuery()} with proper \code{n} argument succeeds. } \section{Additional arguments}{ The following arguments are not part of the \code{dbGetQuery()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{n} (default: -1) \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ A column named \code{row_names} is treated like any other column. The \code{n} argument specifies the number of rows to be fetched. If omitted, fetching multi-row queries with one or more columns returns the entire result. 
A value of \link{Inf} for the \code{n} argument is supported and also returns the full result. If more rows than available are fetched (by passing a too large value for \code{n}), the result is returned in full without warning. If zero rows are requested, the columns of the data frame are still fully typed. Fetching fewer rows than available is permitted, no warning is issued. The \code{param} argument allows passing query parameters, see \code{\link[DBI:dbBind]{DBI::dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. 
Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[DBI:dbBind]{DBI::dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/spec_driver_constructor.Rd0000644000176200001440000000172414722017730017573 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-constructor.R \docType{data} \name{spec_driver_constructor} \alias{spec_driver_constructor} \title{spec_driver_constructor} \description{ spec_driver_constructor } \section{Construction of the DBIDriver object}{ The backend must support creation of an instance of its \link[DBI:DBIDriver-class]{DBI::DBIDriver} subclass with a \dfn{constructor function}. 
By default, its name is the package name without the leading \sQuote{R} (if it exists), e.g., \code{SQLite} for the \pkg{RSQLite} package. However, backend authors may choose a different name. The constructor must be exported, and it must be a function that is callable without arguments. DBI recommends to define a constructor with an empty argument list. } \seealso{ Other driver specifications: \code{\link{spec_driver_connect}}, \code{\link{spec_driver_data_type}}, \code{\link{spec_get_info}} } \concept{driver specifications} DBItest/man/spec_meta_column_info.Rd0000644000176200001440000000305614722017730017151 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-column-info.R \docType{data} \name{spec_meta_column_info} \alias{spec_meta_column_info} \title{spec_meta_column_info} \value{ \code{dbColumnInfo()} returns a data frame with at least two columns \code{"name"} and \code{"type"} (in that order) (and optional columns that start with a dot). The \code{"name"} and \code{"type"} columns contain the names and types of the R columns of the data frame that is returned from \code{\link[DBI:dbFetch]{DBI::dbFetch()}}. The \code{"type"} column is of type \code{character} and only for information. Do not compute on the \code{"type"} column, instead use \code{dbFetch(res, n = 0)} to create a zero-row data frame initialized with the correct data types. } \description{ spec_meta_column_info } \section{Failure modes}{ An attempt to query columns for a closed result set raises an error. } \section{Specification}{ A column named \code{row_names} is treated like any other column. The column names are always consistent with the data returned by \code{dbFetch()}. If the query returns unnamed columns, non-empty and non-\code{NA} names are assigned. Column names that correspond to SQL or R keywords are left unchanged. 
} \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_sql_unquote_identifier.Rd0000644000176200001440000000460714722017730020417 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-unquote-identifier.R \docType{data} \name{spec_sql_unquote_identifier} \alias{spec_sql_unquote_identifier} \title{spec_sql_unquote_identifier} \value{ \code{dbUnquoteIdentifier()} returns a list of objects of the same length as the input. For an empty vector, this function returns a length-0 object. The names of the input argument are preserved in the output. If \code{x} is a value returned by \code{dbUnquoteIdentifier()}, calling \code{dbUnquoteIdentifier(..., dbQuoteIdentifier(..., x))} returns \code{list(x)}. If \code{x} is an object of class \link[DBI:Id]{DBI::Id}, calling \code{dbUnquoteIdentifier(..., x)} returns \code{list(x)}. (For backends it may be most convenient to return \link[DBI:Id]{DBI::Id} objects to achieve this behavior, but this is not required.) Plain character vectors can also be passed to \code{dbUnquoteIdentifier()}. } \description{ spec_sql_unquote_identifier } \section{Failure modes}{ An error is raised if a character vectors with a missing value is passed as the \code{x} argument. } \section{Specification}{ For any character vector of length one, quoting (with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}) then unquoting then quoting the first element is identical to just quoting. This is also true for strings that contain special characters such as a space, a dot, a comma, or quotes used to mark strings or identifiers, if the database supports this. 
Unquoting simple strings (consisting of only letters) wrapped with \code{\link[DBI:SQL]{DBI::SQL()}} and then quoting via \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} gives the same result as just quoting the string. Similarly, unquoting expressions of the form \code{SQL("schema.table")} and then quoting gives the same result as quoting the identifier constructed by \code{Id("schema", "table")}. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_sql_quote_literal.Rd0000644000176200001440000000371414722017730017364 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-quote-literal.R \docType{data} \name{spec_sql_quote_literal} \alias{spec_sql_quote_literal} \title{spec_sql_quote_literal} \value{ \code{dbQuoteLiteral()} returns an object that can be coerced to \link{character}, of the same length as the input. For an empty integer, numeric, character, logical, date, time, or blob vector, this function returns a length-0 object. When passing the returned object again to \code{dbQuoteLiteral()} as \code{x} argument, it is returned unchanged. Passing objects of class \link[DBI:SQL]{DBI::SQL} should also return them unchanged. (For backends it may be most convenient to return \link[DBI:SQL]{DBI::SQL} objects to achieve this behavior, but this is not required.) 
} \description{ spec_sql_quote_literal } \section{Specification}{ The returned expression can be used in a \verb{SELECT ...} query, and the value of \code{dbGetQuery(paste0("SELECT ", dbQuoteLiteral(x)))[[1]]} must be equal to \code{x} for any scalar integer, numeric, string, and logical. If \code{x} is \code{NA}, the result must merely satisfy \code{\link[=is.na]{is.na()}}. The literals \code{"NA"} or \code{"NULL"} are not treated specially. \code{NA} should be translated to an unquoted SQL \code{NULL}, so that the query \verb{SELECT * FROM (SELECT 1) a WHERE ... IS NULL} returns one row. } \section{Failure modes}{ Passing a list for the \code{x} argument raises an error. } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/spec_transaction_begin_commit_rollback.Rd0000644000176200001440000000422714602017371022544 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-transaction-begin-commit-rollback.R \docType{data} \name{spec_transaction_begin_commit_rollback} \alias{spec_transaction_begin_commit_rollback} \title{spec_transaction_begin_commit_rollback} \value{ \code{dbBegin()}, \code{dbCommit()} and \code{dbRollback()} return \code{TRUE}, invisibly. } \description{ spec_transaction_begin_commit_rollback } \section{Failure modes}{ The implementations are expected to raise an error in case of failure, but this is not tested. In any way, all generics throw an error with a closed or invalid connection. 
In addition, a call to \code{dbCommit()} or \code{dbRollback()} without a prior call to \code{dbBegin()} raises an error. Nested transactions are not supported by DBI, an attempt to call \code{dbBegin()} twice yields an error. } \section{Specification}{ Actual support for transactions may vary between backends. A transaction is initiated by a call to \code{dbBegin()} and committed by a call to \code{dbCommit()}. Data written in a transaction must persist after the transaction is committed. For example, a record that is missing when the transaction is started but is created during the transaction must exist both during and after the transaction, and also in a new connection. A transaction can also be aborted with \code{dbRollback()}. All data written in such a transaction must be removed after the transaction is rolled back. For example, a record that is missing when the transaction is started but is created during the transaction must not exist anymore after the rollback. Disconnection from a connection with an open transaction effectively rolls back the transaction. All data written in such a transaction must be removed after the transaction is rolled back. The behavior is not specified if other arguments are passed to these functions. In particular, \pkg{RSQLite} issues named transactions with support for nesting if the \code{name} argument is set. The transaction isolation level is not specified by DBI. } \seealso{ Other transaction specifications: \code{\link{spec_transaction_with_transaction}} } \concept{transaction specifications} DBItest/man/spec_meta_get_row_count.Rd0000644000176200001440000000266514722017730017524 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-get-row-count.R \docType{data} \name{spec_meta_get_row_count} \alias{spec_meta_get_row_count} \title{spec_meta_get_row_count} \value{ \code{dbGetRowCount()} returns a scalar number (integer or numeric), the number of rows fetched so far. 
After calling \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}}, the row count is initially zero. After a call to \code{\link[DBI:dbFetch]{DBI::dbFetch()}} without limit, the row count matches the total number of rows returned. Fetching a limited number of rows increases the number of rows by the number of rows returned, even if fetching past the end of the result set. For queries with an empty result set, zero is returned even after fetching. For data manipulation statements issued with \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}}, zero is returned before and after calling \code{dbFetch()}. } \description{ spec_meta_get_row_count } \section{Failure modes}{ Attempting to get the row count for a result set cleared with \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} gives an error. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_arrow_get_query_arrow.Rd0000644000176200001440000000675514722017730020274 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-get-query-arrow.R \docType{data} \name{spec_arrow_get_query_arrow} \alias{spec_arrow_get_query_arrow} \title{spec_arrow_get_query_arrow} \value{ \code{dbGetQueryArrow()} always returns an object coercible to a \link{data.frame}, with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. } \description{ spec_arrow_get_query_arrow } \section{Failure modes}{ An error is raised when issuing a query over a closed or invalid connection, if the syntax of the query is invalid, or if the query is not a non-\code{NA} string. 
The object returned by \code{dbGetQueryArrow()} can also be passed to \code{\link[nanoarrow:as_nanoarrow_array_stream]{nanoarrow::as_nanoarrow_array_stream()}} to create a nanoarrow array stream object that can be used to read the result set in batches. The chunk size is implementation-specific. } \section{Additional arguments}{ The following arguments are not part of the \code{dbGetQueryArrow()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{params} (default: \code{NULL}) \item \code{immediate} (default: \code{NULL}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. The \code{param} argument allows passing query parameters, see \code{\link[DBI:dbBind]{DBI::dbBind()}} for details. } \section{Specification for the \code{immediate} argument}{ The \code{immediate} argument supports distinguishing between "direct" and "prepared" APIs offered by many database drivers. Passing \code{immediate = TRUE} leads to immediate execution of the query or statement, via the "direct" API (if supported by the driver). The default \code{NULL} means that the backend should choose whatever API makes the most sense for the database, and (if relevant) tries the other API if the first attempt fails. A successful second attempt should result in a message that suggests passing the correct \code{immediate} argument. 
Examples for possible behaviors: \enumerate{ \item DBI backend defaults to \code{immediate = TRUE} internally \enumerate{ \item A query without parameters is passed: query is executed \item A query with parameters is passed: \enumerate{ \item \code{params} not given: rejected immediately by the database because of a syntax error in the query, the backend tries \code{immediate = FALSE} (and gives a message) \item \code{params} given: query is executed using \code{immediate = FALSE} } } \item DBI backend defaults to \code{immediate = FALSE} internally \enumerate{ \item A query without parameters is passed: \enumerate{ \item simple query: query is executed \item "special" query (such as setting a config options): fails, the backend tries \code{immediate = TRUE} (and gives a message) } \item A query with parameters is passed: \enumerate{ \item \code{params} not given: waiting for parameters via \code{\link[DBI:dbBind]{DBI::dbBind()}} \item \code{params} given: query is executed } } } } \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/test_result.Rd0000644000176200001440000000240414602020561015163 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-result.R \name{test_result} \alias{test_result} \title{Test the "Result" class} \usage{ test_result(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. 
The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test the "Result" class } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/DBItest-package.Rd0000644000176200001440000000156514602017371015511 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/DBItest.R \docType{package} \name{DBItest-package} \alias{DBItest} \alias{DBItest-package} \title{DBItest: Testing DBI Backends} \description{ A helper that tests DBI back ends for conformity to the interface. } \details{ The two most important functions are \code{\link[=make_context]{make_context()}} and \code{\link[=test_all]{test_all()}}. The former tells the package how to connect to your DBI backend, the latter executes all tests of the test suite. More fine-grained test functions (all with prefix \code{test_}) are available. See the package's vignette for more details. 
} \seealso{ Useful links: \itemize{ \item \url{https://dbitest.r-dbi.org} \item \url{https://github.com/r-dbi/DBItest} \item Report bugs at \url{https://github.com/r-dbi/DBItest/issues} } } \author{ Kirill Müller } DBItest/man/test_sql.Rd0000644000176200001440000000235514602020561014451 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-sql.R \name{test_sql} \alias{test_sql} \title{Test SQL methods} \usage{ test_sql(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. 
The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Test SQL methods } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/test_data_type.Rd0000644000176200001440000000314414722017730015630 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-data-type.R \name{test_data_type} \alias{test_data_type} \title{test_data_type} \usage{ test_data_type(ctx, dbObj) } \arguments{ \item{ctx, dbObj}{Arguments to internal test function} } \value{ \code{dbDataType()} returns the SQL type that corresponds to the \code{obj} argument as a non-empty character string. For data frames, a character vector with one element per column is returned. } \description{ test_data_type } \section{Failure modes}{ An error is raised for invalid values for the \code{obj} argument such as a \code{NULL} value. } \section{Specification}{ The backend can override the \code{\link[DBI:dbDataType]{DBI::dbDataType()}} generic for its driver class. This generic expects an arbitrary object as second argument. To query the values returned by the default implementation, run \code{example(dbDataType, package = "DBI")}. If the backend needs to override this generic, it must accept all basic R data types as its second argument, namely \link{logical}, \link{integer}, \link{numeric}, \link{character}, dates (see \link{Dates}), date-time (see \link{DateTimeClasses}), and \link{difftime}. If the database supports blobs, this method also must accept lists of \link{raw} vectors, and \link[blob:blob]{blob::blob} objects. 
As-is objects (i.e., wrapped by \code{\link[=I]{I()}}) must be supported and return the same results as their unwrapped counterparts. The SQL data type for \link{factor} and \link{ordered} is the same as for character. The behavior for other object types is not specified. } \keyword{internal} DBItest/man/test_getting_started.Rd0000644000176200001440000000260314602020561017035 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-getting-started.R \name{test_getting_started} \alias{test_getting_started} \title{Getting started with testing} \usage{ test_getting_started(skip = NULL, run_only = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Tests very basic features of a DBI driver package, to support testing and test-first development right from the start. 
} \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_stress}()}, \code{\link{test_transaction}()} } \concept{tests} DBItest/man/spec_result_fetch.Rd0000644000176200001440000000427414722017730016325 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-fetch.R \docType{data} \name{spec_result_fetch} \alias{spec_result_fetch} \title{spec_result_fetch} \value{ \code{dbFetch()} always returns a \link{data.frame} with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. Passing \code{n = NA} is supported and returns an arbitrary number of rows (at least one) as specified by the driver, but at most the remaining rows in the result set. } \description{ spec_result_fetch } \section{Failure modes}{ An attempt to fetch from a closed result set raises an error. If the \code{n} argument is not an atomic whole number greater or equal to -1 or Inf, an error is raised, but a subsequent call to \code{dbFetch()} with proper \code{n} argument succeeds. Calling \code{dbFetch()} on a result set from a data manipulation query created by \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}} can be fetched and return an empty data frame, with a warning. } \section{Specification}{ Fetching multi-row queries with one or more columns by default returns the entire result. Multi-row queries can also be fetched progressively by passing a whole number (\link{integer} or \link{numeric}) as the \code{n} argument. A value of \link{Inf} for the \code{n} argument is supported and also returns the full result. If more rows than available are fetched, the result is returned in full without warning. 
If fewer rows than requested are returned, further fetches will return a data frame with zero rows. If zero rows are fetched, the columns of the data frame are still fully typed. Fetching fewer rows than available is permitted, no warning is issued when clearing the result set. A column named \code{row_names} is treated like any other column. } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_roundtrip}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/make_placeholder_fun.Rd0000644000176200001440000000151014602017371016737 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-bind-.R \name{make_placeholder_fun} \alias{make_placeholder_fun} \title{Create a function that creates n placeholders} \usage{ make_placeholder_fun(pattern) } \arguments{ \item{pattern}{\verb{[character(1)]}\cr Any character, optionally followed by \code{1} or \code{name}. Examples: \code{"?"}, \code{"$1"}, \code{":name"}} } \value{ \verb{[function(n)]}\cr A function with one argument \code{n} that returns a vector of length \code{n} with placeholders of the specified format. } \description{ For internal use by the \code{placeholder_format} tweak. 
} \examples{ body(DBItest:::make_placeholder_fun("?")) DBItest:::make_placeholder_fun("?")(2) DBItest:::make_placeholder_fun("$1")(3) DBItest:::make_placeholder_fun(":name")(5) } \keyword{internal} DBItest/man/spec_get_info.Rd0000644000176200001440000000530414722017730015423 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-driver-get-info.R, % R/spec-connection-get-info.R, R/spec-meta-get-info-result.R \docType{data} \name{spec_get_info} \alias{spec_get_info} \alias{spec_driver_get_info} \alias{spec_connection_get_info} \alias{spec_meta_get_info_result} \title{spec_driver_get_info} \value{ For objects of class \link[DBI:DBIDriver-class]{DBI::DBIDriver}, \code{dbGetInfo()} returns a named list that contains at least the following components: \itemize{ \item \code{driver.version}: the package version of the DBI backend, \item \code{client.version}: the version of the DBMS client library. } For objects of class \link[DBI:DBIConnection-class]{DBI::DBIConnection}, \code{dbGetInfo()} returns a named list that contains at least the following components: \itemize{ \item \code{db.version}: version of the database server, \item \code{dbname}: database name, \item \code{username}: username to connect to the database, \item \code{host}: hostname of the database server, \item \code{port}: port on the database server. It must not contain a \code{password} component. Components that are not applicable should be set to \code{NA}. 
} For objects of class \link[DBI:DBIResult-class]{DBI::DBIResult}, \code{dbGetInfo()} returns a named list that contains at least the following components: \itemize{ \item \code{statement}: the statement used with \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}} or \code{\link[DBI:dbExecute]{DBI::dbExecute()}}, as returned by \code{\link[DBI:dbGetStatement]{DBI::dbGetStatement()}}, \item \code{row.count}: the number of rows fetched so far (for queries), as returned by \code{\link[DBI:dbGetRowCount]{DBI::dbGetRowCount()}}, \item \code{rows.affected}: the number of rows affected (for statements), as returned by \code{\link[DBI:dbGetRowsAffected]{DBI::dbGetRowsAffected()}} \item \code{has.completed}: a logical that indicates if the query or statement has completed, as returned by \code{\link[DBI:dbHasCompleted]{DBI::dbHasCompleted()}}. } } \description{ spec_driver_get_info spec_connection_get_info spec_meta_get_info_result } \seealso{ Other driver specifications: \code{\link{spec_driver_connect}}, \code{\link{spec_driver_constructor}}, \code{\link{spec_driver_data_type}} Other connection specifications: \code{\link{spec_connection_disconnect}} Other meta specifications: \code{\link{spec_meta_bind}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{connection specifications} \concept{driver specifications} \concept{meta specifications} DBItest/man/spec_arrow_create_table_arrow.Rd0000644000176200001440000000534314722017730020672 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-create-table-arrow.R \docType{data} \name{spec_arrow_create_table_arrow} \alias{spec_arrow_create_table_arrow} \title{spec_arrow_create_table_arrow} \value{ \code{dbCreateTableArrow()} returns \code{TRUE}, invisibly. 
} \description{ spec_arrow_create_table_arrow } \section{Failure modes}{ If the table exists, an error is raised; the remote table remains unchanged. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. Invalid values for the \code{temporary} argument (non-scalars, unsupported data types, \code{NA}, incompatible values, duplicate names) also raise an error. } \section{Additional arguments}{ The following arguments are not part of the \code{dbCreateTableArrow()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{temporary} (default: \code{FALSE}) } They must be provided as named arguments. See the "Specification" and "Value" sections for details on their usage. } \section{Specification}{ The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbCreateTableArrow()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } The \code{value} argument can be: \itemize{ \item a data frame, \item a nanoarrow array \item a nanoarrow array stream (which will still contain the data after the call) \item a nanoarrow schema } If the \code{temporary} argument is \code{TRUE}, the table is not available in a second connection and is gone after reconnecting. Not all backends support this argument. A regular, non-temporary table is visible in a second connection, in a pre-existing connection, and after reconnecting to the database. SQL keywords can be used freely in table names, column names, and data. 
Quotes, commas, and spaces can also be used for table names and column names, if the database supports non-syntactic identifiers. } \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_fetch_arrow_chunk}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/spec_compliance_methods.Rd0000644000176200001440000000150314722017730017463 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-compliance-methods.R \docType{data} \name{spec_compliance_methods} \alias{spec_compliance_methods} \title{spec_compliance_methods} \description{ spec_compliance_methods } \section{DBI classes and methods}{ A backend defines three classes, which are subclasses of \link[DBI:DBIDriver-class]{DBI::DBIDriver}, \link[DBI:DBIConnection-class]{DBI::DBIConnection}, and \link[DBI:DBIResult-class]{DBI::DBIResult}. The backend provides implementation for all methods of these base classes that are defined but not implemented by DBI. All methods defined in \pkg{DBI} are reexported (so that the package can be used without having to attach \pkg{DBI}), and have an ellipsis \code{...} in their formals for extensibility. } \concept{compliance specifications} DBItest/man/spec_transaction_with_transaction.Rd0000644000176200001440000000246114722017730021617 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-transaction-with-transaction.R \docType{data} \name{spec_transaction_with_transaction} \alias{spec_transaction_with_transaction} \title{spec_transaction_with_transaction} \value{ \code{dbWithTransaction()} returns the value of the executed code. 
} \description{ spec_transaction_with_transaction } \section{Failure modes}{ Failure to initiate the transaction (e.g., if the connection is closed or invalid or if \code{\link[DBI:transactions]{DBI::dbBegin()}} has been called already) gives an error. } \section{Specification}{ \code{dbWithTransaction()} initiates a transaction with \code{dbBegin()}, executes the code given in the \code{code} argument, and commits the transaction with \code{\link[DBI:transactions]{DBI::dbCommit()}}. If the code raises an error, the transaction is instead aborted with \code{\link[DBI:transactions]{DBI::dbRollback()}}, and the error is propagated. If the code calls \code{dbBreak()}, execution of the code stops and the transaction is silently aborted. All side effects caused by the code (such as the creation of new variables) propagate to the calling environment. } \seealso{ Other transaction specifications: \code{\link{spec_transaction_begin_commit_rollback}} } \concept{transaction specifications} DBItest/man/spec_sql_list_tables.Rd0000644000176200001440000000244414722017730017017 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-list-tables.R \docType{data} \name{spec_sql_list_tables} \alias{spec_sql_list_tables} \title{spec_sql_list_tables} \value{ \code{dbListTables()} returns a character vector that enumerates all tables and views in the database. Tables added with \code{\link[DBI:dbWriteTable]{DBI::dbWriteTable()}} are part of the list. As soon as a table is removed from the database, it is also removed from the list of database tables. The same applies to temporary tables if supported by the database. The returned names are suitable for quoting with \code{dbQuoteIdentifier()}. } \description{ spec_sql_list_tables } \section{Failure modes}{ An error is raised when calling this method for a closed or invalid connection. 
} \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_remove_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_stress.Rd0000644000176200001440000000206114602020561015167 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-stress.R \name{test_stress} \alias{test_stress} \title{Stress tests} \usage{ test_stress(skip = NULL, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. 
For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} } \description{ Stress tests } \seealso{ Other tests: \code{\link{test_arrow}()}, \code{\link{test_compliance}()}, \code{\link{test_connection}()}, \code{\link{test_driver}()}, \code{\link{test_getting_started}()}, \code{\link{test_meta}()}, \code{\link{test_result}()}, \code{\link{test_sql}()}, \code{\link{test_transaction}()} } \concept{tests} \keyword{internal} DBItest/man/spec_arrow_fetch_arrow_chunk.Rd0000644000176200001440000000260314602020561020526 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-arrow-fetch-arrow-chunk.R \docType{data} \name{spec_arrow_fetch_arrow_chunk} \alias{spec_arrow_fetch_arrow_chunk} \title{spec_arrow_fetch_arrow_chunk} \value{ \code{dbFetchArrowChunk()} always returns an object coercible to a \link{data.frame} with as many rows as records were fetched and as many columns as fields in the result set, even if the result is a single value or has one or zero rows. } \description{ spec_arrow_fetch_arrow_chunk } \section{Failure modes}{ An attempt to fetch from a closed result set raises an error. } \section{Specification}{ Fetching multi-row queries with one or more columns returns the next chunk. The size of the chunk is implementation-specific. The object returned by \code{dbFetchArrowChunk()} can also be passed to \code{\link[nanoarrow:as_nanoarrow_array]{nanoarrow::as_nanoarrow_array()}} to create a nanoarrow array object. The chunk size is implementation-specific. 
} \seealso{ Other Arrow specifications: \code{\link{spec_arrow_append_table_arrow}}, \code{\link{spec_arrow_create_table_arrow}}, \code{\link{spec_arrow_fetch_arrow}}, \code{\link{spec_arrow_get_query_arrow}}, \code{\link{spec_arrow_read_table_arrow}}, \code{\link{spec_arrow_send_query_arrow}}, \code{\link{spec_arrow_write_table_arrow}}, \code{\link{spec_result_clear_result}} } \concept{Arrow specifications} DBItest/man/context.Rd0000644000176200001440000000405514722017730014305 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/context.R \name{make_context} \alias{make_context} \alias{set_default_context} \alias{get_default_context} \title{Test contexts} \usage{ make_context( drv, connect_args = NULL, set_as_default = TRUE, tweaks = NULL, name = NULL, default_skip = NULL ) set_default_context(ctx) get_default_context() } \arguments{ \item{drv}{\verb{[DBIConnector]}\cr An object of class \link[DBI:DBIConnector-class]{DBI::DBIConnector} that describes how to connect to the database.} \item{connect_args}{\verb{[named list]}\cr Deprecated.} \item{set_as_default}{\verb{[logical(1)]}\cr Should the created context be set as default context?} \item{tweaks}{\verb{[DBItest_tweaks]}\cr Tweaks as constructed by the \code{\link[=tweaks]{tweaks()}} function.} \item{name}{\verb{[character]}\cr An optional name of the context which will be used in test messages.} \item{default_skip}{\verb{[character]}\cr Default value of \code{skip} argument to \code{\link[=test_all]{test_all()}} and other testing functions.} \item{ctx}{\verb{[DBItest_context]}\cr A test context.} } \value{ \verb{[DBItest_context]}\cr A test context, for \code{set_default_context} the previous default context (invisibly) or \code{NULL}. } \description{ Create a test context, set and query the default context. 
} \examples{ \dontshow{if (requireNamespace("RSQLite", quietly = TRUE)) withAutoprint(\{ # examplesIf} make_context( new( "DBIConnector", .drv = RSQLite::SQLite(), .conn_args = list(dbname = tempfile("DBItest", fileext = ".sqlite")) ), tweaks = tweaks( constructor_relax_args = TRUE, placeholder_pattern = c("?", "$1", "$name", ":name"), date_cast = function(x) paste0("'", x, "'"), time_cast = function(x) paste0("'", x, "'"), timestamp_cast = function(x) paste0("'", x, "'"), logical_return = function(x) as.integer(x), date_typed = FALSE, time_typed = FALSE, timestamp_typed = FALSE ), default_skip = c("roundtrip_date", "roundtrip_timestamp") ) \dontshow{\}) # examplesIf} } DBItest/man/spec_sql_remove_table.Rd0000644000176200001440000000521714722017730017157 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-sql-remove-table.R \docType{data} \name{spec_sql_remove_table} \alias{spec_sql_remove_table} \title{spec_sql_remove_table} \value{ \code{dbRemoveTable()} returns \code{TRUE}, invisibly. } \description{ spec_sql_remove_table } \section{Failure modes}{ If the table does not exist, an error is raised. An attempt to remove a view with this function may result in an error. An error is raised when calling this method for a closed or invalid connection. An error is also raised if \code{name} cannot be processed with \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}} or if this results in a non-scalar. } \section{Additional arguments}{ The following arguments are not part of the \code{dbRemoveTable()} generic (to improve compatibility across backends) but are part of the DBI specification: \itemize{ \item \code{temporary} (default: \code{FALSE}) \item \code{fail_if_missing} (default: \code{TRUE}) } These arguments must be provided as named arguments. If \code{temporary} is \code{TRUE}, the call to \code{dbRemoveTable()} will consider only temporary tables. Not all backends support this argument. 
In particular, permanent tables of the same name are left untouched. If \code{fail_if_missing} is \code{FALSE}, the call to \code{dbRemoveTable()} succeeds if the table does not exist. } \section{Specification}{ A table removed by \code{dbRemoveTable()} doesn't appear in the list of tables returned by \code{\link[DBI:dbListTables]{DBI::dbListTables()}}, and \code{\link[DBI:dbExistsTable]{DBI::dbExistsTable()}} returns \code{FALSE}. The removal propagates immediately to other connections to the same database. This function can also be used to remove a temporary table. The \code{name} argument is processed as follows, to support databases that allow non-syntactic names for their objects: \itemize{ \item If an unquoted table name as string: \code{dbRemoveTable()} will do the quoting, perhaps by calling \code{dbQuoteIdentifier(conn, x = name)} \item If the result of a call to \code{\link[DBI:dbQuoteIdentifier]{DBI::dbQuoteIdentifier()}}: no more quoting is done } } \seealso{ Other sql specifications: \code{\link{spec_sql_append_table}}, \code{\link{spec_sql_create_table}}, \code{\link{spec_sql_exists_table}}, \code{\link{spec_sql_list_fields}}, \code{\link{spec_sql_list_objects}}, \code{\link{spec_sql_list_tables}}, \code{\link{spec_sql_quote_identifier}}, \code{\link{spec_sql_quote_literal}}, \code{\link{spec_sql_quote_string}}, \code{\link{spec_sql_read_table}}, \code{\link{spec_sql_unquote_identifier}}, \code{\link{spec_sql_write_table}} } \concept{sql specifications} DBItest/man/test_all.Rd0000644000176200001440000000516214602020561014421 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test-all.R, R/test-getting-started.R, % R/test-driver.R, R/test-connection.R, R/test-result.R, R/test-sql.R, % R/test-meta.R, R/test-transaction.R, R/test-arrow.R, R/test-compliance.R, % R/test-stress.R \name{test_all} \alias{test_all} \alias{test_some} \title{Run all tests} \usage{ test_all(skip = NULL, run_only = NULL, ctx = 
get_default_context()) test_some(test, ctx = get_default_context()) } \arguments{ \item{skip}{\verb{[character()]}\cr A vector of regular expressions to match against test names; skip test if matching any. The regular expressions are matched against the entire test name minus a possible suffix \verb{_N} where \code{N} is a number. For example, \code{skip = "exists_table"} will skip both \code{"exists_table_1"} and \code{"exists_table_2"}.} \item{run_only}{\verb{[character()]}\cr A vector of regular expressions to match against test names; run only these tests. The regular expressions are matched against the entire test name.} \item{ctx}{\verb{[DBItest_context]}\cr A test context as created by \code{\link[=make_context]{make_context()}}.} \item{test}{\verb{[character]}\cr A character vector of regular expressions describing the tests to run. The regular expressions are matched against the entire test name.} } \description{ \code{test_all()} calls all tests defined in this package (see the section "Tests" below). This function supports running only one test by setting an environment variable, e.g., set the \code{DBITEST_ONLY_RESULT} environment variable to a nonempty value to run only \code{test_result()}. \code{test_some()} allows testing one or more tests. } \details{ Internally \code{^} and \code{$} are used as prefix and suffix around the regular expressions passed in the \code{skip} and \code{run_only} arguments. 
} \section{Tests}{ This function runs the following tests, except the stress tests: \code{\link[=test_getting_started]{test_getting_started()}}: Getting started with testing \code{\link[=test_driver]{test_driver()}}: Test the "Driver" class \code{\link[=test_connection]{test_connection()}}: Test the "Connection" class \code{\link[=test_result]{test_result()}}: Test the "Result" class \code{\link[=test_sql]{test_sql()}}: Test SQL methods \code{\link[=test_meta]{test_meta()}}: Test metadata functions \code{\link[=test_transaction]{test_transaction()}}: Test transaction functions \code{\link[=test_arrow]{test_arrow()}}: Test Arrow methods \code{\link[=test_compliance]{test_compliance()}}: Test full compliance to DBI \code{\link[=test_stress]{test_stress()}}: Stress tests (not tested with \code{test_all}) } DBItest/man/spec_meta_bind.Rd0000644000176200001440000001426714722017730015563 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-meta-bind-runner.R, % R/spec-meta-bind-formals.R, R/spec-meta-bind-expr.R \docType{data} \name{spec_meta_bind} \alias{spec_meta_bind} \title{spec_meta_bind} \value{ \code{dbBind()} returns the result set, invisibly, for queries issued by \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}} or \code{\link[DBI:dbSendQueryArrow]{DBI::dbSendQueryArrow()}} and also for data manipulation statements issued by \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}}. } \description{ spec_meta_bind spec_meta_bind spec_meta_bind } \section{Specification}{ \pkg{DBI} clients execute parametrized statements as follows: \enumerate{ \item Call \code{\link[DBI:dbSendQuery]{DBI::dbSendQuery()}}, \code{\link[DBI:dbSendQueryArrow]{DBI::dbSendQueryArrow()}} or \code{\link[DBI:dbSendStatement]{DBI::dbSendStatement()}} with a query or statement that contains placeholders, store the returned \link[DBI:DBIResult-class]{DBI::DBIResult} object in a variable. 
Mixing placeholders (in particular, named and unnamed ones) is not recommended. It is good practice to register a call to \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} via \code{\link[=on.exit]{on.exit()}} right after calling \code{dbSendQuery()} or \code{dbSendStatement()} (see the last enumeration item). Until \code{\link[DBI:dbBind]{DBI::dbBind()}} or \code{\link[DBI:dbBind]{DBI::dbBindArrow()}} have been called, the returned result set object has the following behavior: \itemize{ \item \code{\link[DBI:dbFetch]{DBI::dbFetch()}} raises an error (for \code{dbSendQuery()} and \code{dbSendQueryArrow()}) \item \code{\link[DBI:dbGetRowCount]{DBI::dbGetRowCount()}} returns zero (for \code{dbSendQuery()} and \code{dbSendQueryArrow()}) \item \code{\link[DBI:dbGetRowsAffected]{DBI::dbGetRowsAffected()}} returns an integer \code{NA} (for \code{dbSendStatement()}) \item \code{\link[DBI:dbIsValid]{DBI::dbIsValid()}} returns \code{TRUE} \item \code{\link[DBI:dbHasCompleted]{DBI::dbHasCompleted()}} returns \code{FALSE} } \item Call \code{\link[DBI:dbBind]{DBI::dbBind()}} or \code{\link[DBI:dbBind]{DBI::dbBindArrow()}}: \itemize{ \item For \code{\link[DBI:dbBind]{DBI::dbBind()}}, the \code{params} argument must be a list where all elements have the same lengths and contain values supported by the backend. A \link{data.frame} is internally stored as such a list. \item For \code{\link[DBI:dbBind]{DBI::dbBindArrow()}}, the \code{params} argument must be a nanoarrow array stream, with one column per query parameter. } \item Retrieve the data or the number of affected rows from the \code{DBIResult} object. \itemize{ \item For queries issued by \code{dbSendQuery()} or \code{dbSendQueryArrow()}, call \code{\link[DBI:dbFetch]{DBI::dbFetch()}}. \item For statements issued by \code{dbSendStatements()}, call \code{\link[DBI:dbGetRowsAffected]{DBI::dbGetRowsAffected()}}. 
(Execution begins immediately after the \code{\link[DBI:dbBind]{DBI::dbBind()}} call, the statement is processed entirely before the function returns.) } \item Repeat 2. and 3. as necessary. \item Close the result set via \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}}. } The elements of the \code{params} argument do not need to be scalars; vectors of arbitrary length (including length 0) are supported. For queries, calling \code{dbFetch()} binding such parameters returns concatenated results, equivalent to binding and fetching for each set of values and connecting via \code{\link[=rbind]{rbind()}}. For data manipulation statements, \code{dbGetRowsAffected()} returns the total number of rows affected if binding non-scalar parameters. \code{dbBind()} also accepts repeated calls on the same result set for both queries and data manipulation statements, even if no results are fetched between calls to \code{dbBind()}. If the placeholders in the query are named, their order in the \code{params} argument is not important. At least the following data types are accepted on input (including \link{NA}): \itemize{ \item \link{integer} \item \link{numeric} \item \link{logical} for Boolean values \item \link{character} (also with special characters such as spaces, newlines, quotes, and backslashes) \item \link{factor} (bound as character, with warning) \item \link[lubridate:date_utils]{lubridate::Date} (also when stored internally as integer) \item \link[lubridate:posix_utils]{lubridate::POSIXct} timestamps \item \link{POSIXlt} timestamps \item \link{difftime} values (also with units other than seconds and with the value stored as integer) \item lists of \link{raw} for blobs (with \code{NULL} entries for SQL NULL values) \item objects of type \link[blob:blob]{blob::blob} } } \section{Failure modes}{ Calling \code{dbBind()} for a query without parameters raises an error. 
Binding too many or not enough values, or parameters with wrong names or unequal length, also raises an error. If the placeholders in the query are named, all parameter values must have names (which must not be empty or \code{NA}), and vice versa, otherwise an error is raised. The behavior for mixing placeholders of different types (in particular mixing positional and named placeholders) is not specified. Calling \code{dbBind()} on a result set already cleared by \code{\link[DBI:dbClearResult]{DBI::dbClearResult()}} also raises an error. } \seealso{ Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} Other meta specifications: \code{\link{spec_get_info}}, \code{\link{spec_meta_column_info}}, \code{\link{spec_meta_get_row_count}}, \code{\link{spec_meta_get_rows_affected}}, \code{\link{spec_meta_get_statement}}, \code{\link{spec_meta_has_completed}}, \code{\link{spec_meta_is_valid}} } \concept{meta specifications} DBItest/man/spec_result_roundtrip.Rd0000644000176200001440000000526414722017730017262 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spec-result-roundtrip.R \docType{data} \name{spec_result_roundtrip} \alias{spec_result_roundtrip} \title{spec_result_roundtrip} \description{ spec_result_roundtrip } \section{Specification}{ The column types of the returned data frame depend on the data returned: \itemize{ \item \link{integer} (or coercible to an integer) for integer values between -2^31 and 2^31 - 1, 
with \link{NA} for SQL \code{NULL} values \item \link{numeric} for numbers with a fractional component, with NA for SQL \code{NULL} values \item \link{logical} for Boolean values (some backends may return an integer); with NA for SQL \code{NULL} values \item \link{character} for text, with NA for SQL \code{NULL} values \item lists of \link{raw} for blobs with \link{NULL} entries for SQL NULL values \item coercible using \code{\link[=as.Date]{as.Date()}} for dates, with NA for SQL \code{NULL} values (also applies to the return value of the SQL function \code{current_date}) \item coercible using \code{\link[hms:hms]{hms::as_hms()}} for times, with NA for SQL \code{NULL} values (also applies to the return value of the SQL function \code{current_time}) \item coercible using \code{\link[=as.POSIXct]{as.POSIXct()}} for timestamps, with NA for SQL \code{NULL} values (also applies to the return value of the SQL function \code{current_timestamp}) } If dates and timestamps are supported by the backend, the following R types are used: \itemize{ \item \link[lubridate:date_utils]{lubridate::Date} for dates (also applies to the return value of the SQL function \code{current_date}) \item \link[lubridate:posix_utils]{lubridate::POSIXct} for timestamps (also applies to the return value of the SQL function \code{current_timestamp}) } R has no built-in type with lossless support for the full range of 64-bit or larger integers. 
If 64-bit integers are returned from a query, the following rules apply: \itemize{ \item Values are returned in a container with support for the full range of valid 64-bit values (such as the \code{integer64} class of the \pkg{bit64} package) \item Coercion to numeric always returns a number that is as close as possible to the true value \item Loss of precision when converting to numeric gives a warning \item Conversion to character always returns a lossless decimal representation of the data } } \seealso{ Other result specifications: \code{\link{spec_result_clear_result}}, \code{\link{spec_result_create_table_with_data_type}}, \code{\link{spec_result_execute}}, \code{\link{spec_result_fetch}}, \code{\link{spec_result_get_query}}, \code{\link{spec_result_send_query}}, \code{\link{spec_result_send_statement}} } \concept{result specifications} DBItest/man/tweaks.Rd0000644000176200001440000001107614722017730014120 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tweaks.R \name{tweaks} \alias{tweaks} \title{Tweaks for DBI tests} \usage{ tweaks( ..., constructor_name = NULL, constructor_relax_args = FALSE, strict_identifier = FALSE, omit_blob_tests = FALSE, current_needs_parens = FALSE, union = function(x) paste(x, collapse = " UNION "), placeholder_pattern = NULL, logical_return = identity, date_cast = function(x) paste0("date('", x, "')"), time_cast = function(x) paste0("time('", x, "')"), timestamp_cast = function(x) paste0("timestamp('", x, "')"), blob_cast = identity, date_typed = TRUE, time_typed = TRUE, timestamp_typed = TRUE, temporary_tables = TRUE, list_temporary_tables = TRUE, allow_na_rows_affected = FALSE, is_null_check = function(x) paste0("(", x, " IS NULL)"), create_table_as = function(table_name, query) paste0("CREATE TABLE ", table_name, " AS ", query), dbitest_version = "1.7.1" ) } \arguments{ \item{...}{\verb{[any]}\cr Unknown tweaks are accepted, with a warning. 
The ellipsis also makes sure that you only can pass named arguments.} \item{constructor_name}{\verb{[character(1)]}\cr Name of the function that constructs the \code{Driver} object.} \item{constructor_relax_args}{\verb{[logical(1)]}\cr If \code{TRUE}, allow a driver constructor with default values for all arguments; otherwise, require a constructor with empty argument list (default).} \item{strict_identifier}{\verb{[logical(1)]}\cr Set to \code{TRUE} if the DBMS does not support arbitrarily-named identifiers even when quoting is used.} \item{omit_blob_tests}{\verb{[logical(1)]}\cr Set to \code{TRUE} if the DBMS does not support a \code{BLOB} data type.} \item{current_needs_parens}{\verb{[logical(1)]}\cr Set to \code{TRUE} if the SQL functions \code{current_date}, \code{current_time}, and \code{current_timestamp} require parentheses.} \item{union}{\verb{[function(character)]}\cr Function that combines several subqueries into one so that the resulting query returns the concatenated results of the subqueries} \item{placeholder_pattern}{\verb{[character]}\cr A pattern for placeholders used in \code{\link[DBI:dbBind]{DBI::dbBind()}}, e.g., \code{"?"}, \code{"$1"}, or \code{":name"}. 
See \code{\link[=make_placeholder_fun]{make_placeholder_fun()}} for details.} \item{logical_return}{\verb{[function(logical)]}\cr A vectorized function that converts logical values to the data type returned by the DBI backend.} \item{date_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a date value.} \item{time_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a time value.} \item{timestamp_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a timestamp value.} \item{blob_cast}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for coercing a string to a blob value.} \item{date_typed}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support a dedicated type for dates.} \item{time_typed}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support a dedicated type for times.} \item{timestamp_typed}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support a dedicated type for timestamps.} \item{temporary_tables}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support temporary tables.} \item{list_temporary_tables}{\verb{[logical(1L)]}\cr Set to \code{FALSE} if the DBMS doesn't support listing temporary tables.} \item{allow_na_rows_affected}{\verb{[logical(1L)]}\cr Set to \code{TRUE} to allow \code{\link[DBI:dbGetRowsAffected]{DBI::dbGetRowsAffected()}} to return \code{NA}.} \item{is_null_check}{\verb{[function(character)]}\cr A vectorized function that creates an SQL expression for checking if a value is \code{NULL}.} \item{create_table_as}{\verb{[function(character(1), character(1))]}\cr A function that creates an SQL expression for creating a table from an SQL expression.} \item{dbitest_version}{\verb{[character(1)]}\cr Compatible DBItest version, default: "1.7.1".} } \description{ 
The tweaks are a way to control the behavior of certain tests. Currently, you need to search the \pkg{DBItest} source code to understand which tests are affected by which tweaks. This function is usually called to set the \code{tweaks} argument in a \code{\link[=make_context]{make_context()}} call. } \examples{ \dontrun{ make_context(..., tweaks = tweaks(strict_identifier = TRUE)) } } DBItest/DESCRIPTION0000644000176200001440000000746214725062511013272 0ustar liggesusersPackage: DBItest Title: Testing DBI Backends Version: 1.8.2 Date: 2024-12-07 Authors@R: c( person("Kirill", "Müller", , "kirill@cynkra.com", role = c("aut", "cre"), comment = c(ORCID = "0000-0002-1416-3412")), person("RStudio", role = "cph"), person("R Consortium", role = "fnd") ) Description: A helper that tests DBI back ends for conformity to the interface. License: LGPL (>= 2.1) URL: https://dbitest.r-dbi.org, https://github.com/r-dbi/DBItest BugReports: https://github.com/r-dbi/DBItest/issues Depends: R (>= 3.2.0) Imports: blob (>= 1.2.0), callr, DBI (>= 1.2.3), desc, hms (>= 0.5.0), lubridate, magrittr, methods, nanoarrow, palmerpenguins, rlang (>= 0.2.0), testthat (>= 2.0.0), utils, withr Suggests: clipr, constructive, debugme, devtools, knitr, lintr, pkgload, rmarkdown, RSQLite VignetteBuilder: knitr Config/Needs/website: r-dbi/dbitemplate Config/autostyle/scope: line_breaks Config/autostyle/strict: false Config/testthat/edition: 3 Config/Needs/check: decor Encoding: UTF-8 KeepSource: true RoxygenNote: 7.3.2.9000 Collate: 'DBItest.R' 'compat-purrr.R' 'context.R' 'dbi.R' 'dummy.R' 'expectations.R' 'generics.R' 'import-dbi.R' 'import-testthat.R' 'run.R' 's4.R' 'spec-getting-started.R' 'spec-compliance-methods.R' 'spec-driver-constructor.R' 'spec-driver-data-type.R' 'spec-connection-data-type.R' 'spec-result-create-table-with-data-type.R' 'spec-driver-connect.R' 'spec-connection-disconnect.R' 'spec-result-send-query.R' 'spec-result-fetch.R' 'spec-result-roundtrip.R' 'spec-result-clear-result.R' 
'spec-result-get-query.R' 'spec-result-send-statement.R' 'spec-result-execute.R' 'spec-sql-quote-string.R' 'spec-sql-quote-literal.R' 'spec-sql-quote-identifier.R' 'spec-sql-unquote-identifier.R' 'spec-sql-read-table.R' 'spec-sql-create-table.R' 'spec-sql-append-table.R' 'spec-sql-write-table.R' 'spec-sql-list-tables.R' 'spec-sql-exists-table.R' 'spec-sql-remove-table.R' 'spec-sql-list-objects.R' 'spec-meta-bind-runner.R' 'spec-meta-bind-formals.R' 'spec-meta-bind-expr.R' 'spec-meta-bind.R' 'spec-meta-bind-arrow.R' 'spec-meta-bind-stream.R' 'spec-meta-bind-arrow-stream.R' 'spec-meta-bind-.R' 'spec-meta-is-valid.R' 'spec-meta-has-completed.R' 'spec-meta-get-statement.R' 'spec-meta-get-row-count.R' 'spec-meta-get-rows-affected.R' 'spec-transaction-begin-commit-rollback.R' 'spec-transaction-with-transaction.R' 'spec-arrow-send-query-arrow.R' 'spec-arrow-fetch-arrow.R' 'spec-arrow-fetch-arrow-chunk.R' 'spec-arrow-get-query-arrow.R' 'spec-arrow-read-table-arrow.R' 'spec-arrow-write-table-arrow.R' 'spec-arrow-create-table-arrow.R' 'spec-arrow-append-table-arrow.R' 'spec-arrow-bind.R' 'spec-arrow-roundtrip.R' 'spec-driver-get-info.R' 'spec-connection-get-info.R' 'spec-sql-list-fields.R' 'spec-meta-column-info.R' 'spec-meta-get-info-result.R' 'spec-driver.R' 'spec-connection.R' 'spec-result.R' 'spec-sql.R' 'spec-meta.R' 'spec-arrow.R' 'spec-transaction.R' 'spec-compliance.R' 'spec-stress-connection.R' 'spec-stress.R' 'spec-all.R' 'spec-.R' 'test-all.R' 'test-getting-started.R' 'test-driver.R' 'test-connection.R' 'test-result.R' 'test-sql.R' 'test-meta.R' 'test-transaction.R' 'test-arrow.R' 'test-compliance.R' 'test-stress.R' 'test_backend.R' 'tweaks.R' 'utf8.R' 'utils.R' 'zzz.R' NeedsCompilation: no Packaged: 2024-12-07 14:52:46 UTC; kirill Author: Kirill Müller [aut, cre] (), RStudio [cph], R Consortium [fnd] Maintainer: Kirill Müller Repository: CRAN Date/Publication: 2024-12-07 15:10:01 UTC