From 1778bbc0f71321674e97b54bacc9384e9edb8732 Mon Sep 17 00:00:00 2001 From: Zac Davies <80654433+zacdav-db@users.noreply.github.com> Date: Tue, 11 Jun 2024 08:25:40 -0700 Subject: [PATCH 01/17] Allowing spark sql to write non-temporary tables (#1514) Co-authored-by: Zac Davies --- R/backend-spark-sql.R | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/R/backend-spark-sql.R b/R/backend-spark-sql.R index 8cd9288d1..cabb0d1c3 100644 --- a/R/backend-spark-sql.R +++ b/R/backend-spark-sql.R @@ -126,12 +126,8 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") analyze = TRUE, in_transaction = FALSE) { - if (temporary) { - sql <- sql_values_subquery(con, values, types = types, lvl = 1) - db_compute(con, table, sql, overwrite = overwrite) - } else { - NextMethod() - } + sql <- sql_values_subquery(con, values, types = types, lvl = 1) + db_compute(con, table, sql, overwrite = overwrite, temporary = temporary) } #' @export @@ -146,14 +142,11 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") analyze = TRUE, in_transaction = FALSE) { - if (!temporary) { - cli::cli_abort("Spark SQL only support temporary tables") - } - sql <- glue_sql2( con, "CREATE ", if (overwrite) "OR REPLACE ", - "TEMPORARY VIEW {.tbl {table}} AS \n", + if (temporary) "TEMPORARY VIEW" else "TABLE", + " {.tbl {table}} AS \n", "{.from {sql}}" ) DBI::dbExecute(con, sql) From 860bd6adf988a2b039485030d234eba05ee46a3c Mon Sep 17 00:00:00 2001 From: Zac Davies <80654433+zacdav-db@users.noreply.github.com> Date: Tue, 11 Jun 2024 08:57:36 -0700 Subject: [PATCH 02/17] Update `NEWS.md` (#1516) Part of #1514 Co-authored-by: Zac Davies --- NEWS.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/NEWS.md b/NEWS.md index dae72d35d..496fc7d0b 100644 --- a/NEWS.md +++ b/NEWS.md @@ -2,6 +2,9 @@ * `across(everything())` doesn't select grouping columns created via `.by` in `summarise()` (@mgirlich, #1493). + +* Spark SQL backend now supports persisting tables with + `compute(x, name = I("x.y.z"), temporary = FALSE)` (@zacdav-db, #1502). # dbplyr 2.5.0 From c3aa40677527150cd3789b95a0f1ce3927b5e56c Mon Sep 17 00:00:00 2001 From: Enrico Spinielli Date: Mon, 9 Sep 2024 15:07:21 +0200 Subject: [PATCH 03/17] fix typo in documentation (#1539) --- R/src_dbi.R | 2 +- man/tbl.src_dbi.Rd | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/R/src_dbi.R b/R/src_dbi.R index 5c62b7e56..6dbb2d423 100644 --- a/R/src_dbi.R +++ b/R/src_dbi.R @@ -3,7 +3,7 @@ #' All data manipulation on SQL tbls are lazy: they will not actually #' run the query or retrieve the data unless you ask for it: they all return #' a new `tbl_dbi` object. Use [compute()] to run the query and save the -#' results in a temporary in the database, or use [collect()] to retrieve the +#' results in a temporary table in the database, or use [collect()] to retrieve the #' results to R. You can see the query with [show_query()]. #' #' @details diff --git a/man/tbl.src_dbi.Rd b/man/tbl.src_dbi.Rd index c2c4ad005..13c72f004 100644 --- a/man/tbl.src_dbi.Rd +++ b/man/tbl.src_dbi.Rd @@ -23,7 +23,7 @@ also use \code{\link[=in_schema]{in_schema()}}/\code{\link[=in_catalog]{in_catal All data manipulation on SQL tbls are lazy: they will not actually run the query or retrieve the data unless you ask for it: they all return a new \code{tbl_dbi} object. 
Use \code{\link[=compute]{compute()}} to run the query and save the -results in a temporary in the database, or use \code{\link[=collect]{collect()}} to retrieve the +results in a temporary table in the database, or use \code{\link[=collect]{collect()}} to retrieve the results to R. You can see the query with \code{\link[=show_query]{show_query()}}. } \details{ From 1ff6363437c3ce9292a1c7c3b37a4073965bb2ec Mon Sep 17 00:00:00 2001 From: edward-burn <9583964+edward-burn@users.noreply.github.com> Date: Mon, 9 Sep 2024 17:20:11 +0100 Subject: [PATCH 04/17] correct sign of difftime for various DBMS' (#1532) --------- Co-authored-by: Simon P. Couch --- NEWS.md | 3 +++ R/backend-mssql.R | 2 +- R/backend-postgres.R | 2 +- R/backend-redshift.R | 2 +- R/backend-snowflake.R | 2 +- tests/testthat/test-backend-mssql.R | 4 ++-- tests/testthat/test-backend-postgres.R | 4 ++-- tests/testthat/test-backend-redshift.R | 4 ++-- tests/testthat/test-backend-snowflake.R | 4 ++-- 9 files changed, 15 insertions(+), 12 deletions(-) diff --git a/NEWS.md b/NEWS.md index 496fc7d0b..7208d0936 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,8 @@ # dbplyr (development version) +* Translations of `difftime()` for Postgres, SQL server, Redshift, and Snowflake + previously returned the wrong sign and are now correct (@edward-burn, #1532). + * `across(everything())` doesn't select grouping columns created via `.by` in `summarise()` (@mgirlich, #1493). diff --git a/R/backend-mssql.R b/R/backend-mssql.R index 1850e6d89..ad74148bb 100644 --- a/R/backend-mssql.R +++ b/R/backend-mssql.R @@ -383,7 +383,7 @@ simulate_mssql <- function(version = "15.0") { cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') } - sql_expr(DATEDIFF(DAY, !!time1, !!time2)) + sql_expr(DATEDIFF(DAY, !!time2, !!time1)) } ) diff --git a/R/backend-postgres.R b/R/backend-postgres.R index 5f9f0ba6f..cc7f11a26 100644 --- a/R/backend-postgres.R +++ b/R/backend-postgres.R @@ -268,7 +268,7 @@ sql_translation.PqConnection <- function(con) { cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') } - sql_expr((CAST(!!time2 %AS% DATE) - CAST(!!time1 %AS% DATE))) + sql_expr((CAST(!!time1 %AS% DATE) - CAST(!!time2 %AS% DATE))) }, ), sql_translator(.parent = base_agg, diff --git a/R/backend-redshift.R b/R/backend-redshift.R index b3f35d3c5..d12544107 100644 --- a/R/backend-redshift.R +++ b/R/backend-redshift.R @@ -94,7 +94,7 @@ sql_translation.RedshiftConnection <- function(con) { cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') } - sql_expr(DATEDIFF(DAY, !!time1, !!time2)) + sql_expr(DATEDIFF(DAY, !!time2, !!time1)) } ), sql_translator(.parent = postgres$aggregate, diff --git a/R/backend-snowflake.R b/R/backend-snowflake.R index afb696f2d..c4d8700f6 100644 --- a/R/backend-snowflake.R +++ b/R/backend-snowflake.R @@ -243,7 +243,7 @@ sql_translation.Snowflake <- function(con) { cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') } - sql_expr(DATEDIFF(DAY, !!time1, !!time2)) + sql_expr(DATEDIFF(DAY, !!time2, !!time1)) }, # LEAST / GREATEST on Snowflake will not respect na.rm = TRUE by default (similar to Oracle/Access) # https://docs.snowflake.com/en/sql-reference/functions/least diff --git a/tests/testthat/test-backend-mssql.R b/tests/testthat/test-backend-mssql.R index bab3508d3..709a81631 100644 --- a/tests/testthat/test-backend-mssql.R +++ b/tests/testthat/test-backend-mssql.R @@ -143,8 +143,8 @@ test_that("custom clock functions 
translated correctly", { test_that("difftime is translated correctly", { local_con(simulate_mssql()) - expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `start_date`, `end_date`)")) - expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `start_date`, `end_date`)")) + expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) + expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) diff --git a/tests/testthat/test-backend-postgres.R b/tests/testthat/test-backend-postgres.R index 0517f195e..7c43ebe23 100644 --- a/tests/testthat/test-backend-postgres.R +++ b/tests/testthat/test-backend-postgres.R @@ -102,8 +102,8 @@ test_that("custom clock functions translated correctly", { test_that("difftime is translated correctly", { local_con(simulate_postgres()) - expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("(CAST(`end_date` AS DATE) - CAST(`start_date` AS DATE))")) - expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("(CAST(`end_date` AS DATE) - CAST(`start_date` AS DATE))")) + expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("(CAST(`start_date` AS DATE) - CAST(`end_date` AS DATE))")) + expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("(CAST(`start_date` AS DATE) - CAST(`end_date` AS DATE))")) expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) diff --git a/tests/testthat/test-backend-redshift.R b/tests/testthat/test-backend-redshift.R index f2b5a9699..879fe4517 100644 --- a/tests/testthat/test-backend-redshift.R +++ b/tests/testthat/test-backend-redshift.R @@ -72,8 +72,8 @@ test_that("custom clock functions translated correctly", { test_that("difftime is translated correctly", { local_con(simulate_redshift()) - expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `start_date`, `end_date`)")) - expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `start_date`, `end_date`)")) + expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) + expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) diff --git a/tests/testthat/test-backend-snowflake.R b/tests/testthat/test-backend-snowflake.R index 37c50828f..89d091de0 100644 --- a/tests/testthat/test-backend-snowflake.R +++ b/tests/testthat/test-backend-snowflake.R @@ -116,8 +116,8 @@ test_that("custom clock functions translated correctly", { test_that("difftime is translated correctly", { local_con(simulate_snowflake()) - expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `start_date`, `end_date`)")) - expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `start_date`, `end_date`)")) + 
expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) + expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) From 0615a65cf142f44899161a693b4aee68b1b81125 Mon Sep 17 00:00:00 2001 From: edward-burn <9583964+edward-burn@users.noreply.github.com> Date: Fri, 18 Oct 2024 22:11:36 +0100 Subject: [PATCH 05/17] translations for `date_count_between` (#1496) --------- Co-authored-by: Simon P. Couch --- NEWS.md | 4 +++- R/backend-mssql.R | 12 ++++++++++++ R/backend-postgres.R | 12 ++++++++++++ R/backend-redshift.R | 12 ++++++++++++ R/backend-snowflake.R | 12 ++++++++++++ R/backend-spark-sql.R | 12 ++++++++++++ tests/testthat/test-backend-mssql.R | 4 ++++ tests/testthat/test-backend-postgres.R | 4 ++++ tests/testthat/test-backend-redshift.R | 4 ++++ tests/testthat/test-backend-snowflake.R | 4 ++++ tests/testthat/test-backend-spark-sql.R | 4 ++++ 11 files changed, 83 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index 7208d0936..5653e433a 100644 --- a/NEWS.md +++ b/NEWS.md @@ -5,7 +5,9 @@ * `across(everything())` doesn't select grouping columns created via `.by` in `summarise()` (@mgirlich, #1493). - + +* New translations of clock function `date_count_between()` for SQL server, Redshift, Snowflake, Postgres, and Spark (@edward-burn, #1495). + * Spark SQL backend now supports persisting tables with `compute(x, name = I("x.y.z"), temporary = FALSE)` (@zacdav-db, #1502). diff --git a/R/backend-mssql.R b/R/backend-mssql.R index ad74148bb..ae70b7b86 100644 --- a/R/backend-mssql.R +++ b/R/backend-mssql.R @@ -372,6 +372,18 @@ simulate_mssql <- function(version = "15.0") { get_day = function(x) { sql_expr(DATEPART(DAY, !!x)) }, + date_count_between = function(start, end, precision, ..., n = 1L){ + + check_dots_empty() + if (precision != "day") { + cli_abort("{.arg precision} must be {.val day} on SQL backends.") + } + if (n != 1) { + cli_abort("{.arg n} must be {.val 1} on SQL backends.") + } + + sql_expr(DATEDIFF(DAY, !!start, !!end)) + }, difftime = function(time1, time2, tz, units = "days") { diff --git a/R/backend-postgres.R b/R/backend-postgres.R index cc7f11a26..a766149c3 100644 --- a/R/backend-postgres.R +++ b/R/backend-postgres.R @@ -248,6 +248,18 @@ sql_translation.PqConnection <- function(con) { date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { sql_expr(make_date(!!year, !!month, !!day)) }, + date_count_between = function(start, end, precision, ..., n = 1L){ + + check_dots_empty() + if (precision != "day") { + cli_abort("{.arg precision} must be {.val day} on SQL backends.") + } + if (n != 1) { + cli_abort("{.arg n} must be {.val 1} on SQL backends.") + } + + sql_expr(!!end - !!start) + }, get_year = function(x) { sql_expr(date_part('year', !!x)) }, diff --git a/R/backend-redshift.R b/R/backend-redshift.R index d12544107..d02d4a1a9 100644 --- a/R/backend-redshift.R +++ b/R/backend-redshift.R @@ -83,6 +83,18 @@ sql_translation.RedshiftConnection <- function(con) { get_day = function(x) { sql_expr(DATE_PART('day', !!x)) }, + date_count_between = function(start, end, precision, ..., n = 1L){ + + check_dots_empty() + if (precision != "day") { + cli_abort("{.arg precision} must be {.val day} on SQL backends.") + } + if (n != 1) { + cli_abort("{.arg n} must be {.val 
1} on SQL backends.") + } + + sql_expr(DATEDIFF(DAY, !!start, !!end)) + }, difftime = function(time1, time2, tz, units = "days") { diff --git a/R/backend-snowflake.R b/R/backend-snowflake.R index c4d8700f6..8219da421 100644 --- a/R/backend-snowflake.R +++ b/R/backend-snowflake.R @@ -232,6 +232,18 @@ sql_translation.Snowflake <- function(con) { get_day = function(x) { sql_expr(DATE_PART(DAY, !!x)) }, + date_count_between = function(start, end, precision, ..., n = 1L){ + + check_dots_empty() + if (precision != "day") { + cli_abort("{.arg precision} must be {.val day} on SQL backends.") + } + if (n != 1) { + cli_abort("{.arg n} must be {.val 1} on SQL backends.") + } + + sql_expr(DATEDIFF(DAY, !!start, !!end)) + }, difftime = function(time1, time2, tz, units = "days") { diff --git a/R/backend-spark-sql.R b/R/backend-spark-sql.R index cabb0d1c3..c551f830a 100644 --- a/R/backend-spark-sql.R +++ b/R/backend-spark-sql.R @@ -58,6 +58,18 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") get_day = function(x) { sql_expr(date_part('DAY', !!x)) }, + date_count_between = function(start, end, precision, ..., n = 1L){ + + check_dots_empty() + if (precision != "day") { + cli_abort("{.arg precision} must be {.val day} on SQL backends.") + } + if (n != 1) { + cli_abort("{.arg n} must be {.val 1} on SQL backends.") + } + + sql_expr(datediff(!!end, !!start)) + }, difftime = function(time1, time2, tz, units = "days") { diff --git a/tests/testthat/test-backend-mssql.R b/tests/testthat/test-backend-mssql.R index 709a81631..682ca1b1a 100644 --- a/tests/testthat/test-backend-mssql.R +++ b/tests/testthat/test-backend-mssql.R @@ -139,6 +139,10 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_year(date_column)), sql("DATEPART(YEAR, `date_column`)")) expect_equal(test_translate_sql(get_month(date_column)), sql("DATEPART(MONTH, `date_column`)")) expect_equal(test_translate_sql(get_day(date_column)), sql("DATEPART(DAY, `date_column`)")) + expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), + sql("DATEDIFF(DAY, `date_column_1`, `date_column_2`)")) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) }) test_that("difftime is translated correctly", { diff --git a/tests/testthat/test-backend-postgres.R b/tests/testthat/test-backend-postgres.R index 7c43ebe23..5ae2497ae 100644 --- a/tests/testthat/test-backend-postgres.R +++ b/tests/testthat/test-backend-postgres.R @@ -98,6 +98,10 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART('year', `date_column`)")) expect_equal(test_translate_sql(get_month(date_column)), sql("DATE_PART('month', `date_column`)")) expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART('day', `date_column`)")) + expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), + sql("`date_column_2` - `date_column_1`")) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) }) test_that("difftime is translated correctly", { diff --git a/tests/testthat/test-backend-redshift.R b/tests/testthat/test-backend-redshift.R index 879fe4517..3ee191d8f 100644 --- a/tests/testthat/test-backend-redshift.R +++ 
b/tests/testthat/test-backend-redshift.R @@ -68,6 +68,10 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART('year', `date_column`)")) expect_equal(test_translate_sql(get_month(date_column)), sql("DATE_PART('month', `date_column`)")) expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART('day', `date_column`)")) + expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), + sql("DATEDIFF(DAY, `date_column_1`, `date_column_2`)")) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) }) test_that("difftime is translated correctly", { diff --git a/tests/testthat/test-backend-snowflake.R b/tests/testthat/test-backend-snowflake.R index 89d091de0..208532291 100644 --- a/tests/testthat/test-backend-snowflake.R +++ b/tests/testthat/test-backend-snowflake.R @@ -112,6 +112,10 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART(YEAR, `date_column`)")) expect_equal(test_translate_sql(get_month(date_column)), sql("DATE_PART(MONTH, `date_column`)")) expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART(DAY, `date_column`)")) + expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), + sql("DATEDIFF(DAY, `date_column_1`, `date_column_2`)")) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) }) test_that("difftime is translated correctly", { diff --git a/tests/testthat/test-backend-spark-sql.R b/tests/testthat/test-backend-spark-sql.R index e1276c7a0..5332457d3 100644 --- a/tests/testthat/test-backend-spark-sql.R +++ b/tests/testthat/test-backend-spark-sql.R @@ -8,6 +8,10 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART('YEAR', `date_column`)")) expect_equal(test_translate_sql(get_month(date_column)), sql("DATE_PART('MONTH', `date_column`)")) expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART('DAY', `date_column`)")) + expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), + sql("DATEDIFF(`date_column_2`, `date_column_1`)")) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) + expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) }) test_that("difftime is translated correctly", { From 7fb52d7e39283a04f8bd83cdd6132058a0b566dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Simon?= <74007913+rplsmn@users.noreply.github.com> Date: Thu, 24 Oct 2024 16:37:10 +0200 Subject: [PATCH 06/17] address `as.double()` and `as.character()` errors with Teradata (#1545) --------- Co-authored-by: simonpcouch --- NEWS.md | 3 +++ R/backend-teradata.R | 8 ++++++-- tests/testthat/test-backend-teradata.R | 5 +++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/NEWS.md b/NEWS.md index 5653e433a..fc76ea9bb 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,8 @@ # dbplyr (development version) +* Translations for `as.double()` and `as.character()` with Teradata previously + raised errors and are now correct (@rplsmn, #1545). 
+ * Translations of `difftime()` for Postgres, SQL server, Redshift, and Snowflake previously returned the wrong sign and are now correct (@edward-burn, #1532). diff --git a/R/backend-teradata.R b/R/backend-teradata.R index ba17d3a45..e5fc0c861 100644 --- a/R/backend-teradata.R +++ b/R/backend-teradata.R @@ -109,8 +109,12 @@ sql_translation.Teradata <- function(con) { digits <- vctrs::vec_cast(digits, integer()) sql_expr(CAST(!!x %as% DECIMAL(12L, !!digits))) }, - as.double = sql_cast("NUMERIC"), - as.character = sql_cast("VARCHAR(MAX)"), + as.double = sql_cast("FLOAT"), + as.character = function(x, nchar = 255L) { + check_number_whole(nchar, min = 0, max = 64000) + nchar <- vctrs::vec_cast(nchar, integer()) + sql_expr(CAST(!!x %as% VARCHAR(!!nchar))) + }, as.Date = teradata_as_date, log10 = sql_prefix("LOG"), log = sql_log(), diff --git a/tests/testthat/test-backend-teradata.R b/tests/testthat/test-backend-teradata.R index c1883097e..6595979b9 100644 --- a/tests/testthat/test-backend-teradata.R +++ b/tests/testthat/test-backend-teradata.R @@ -4,8 +4,9 @@ test_that("custom scalar translated correctly", { expect_equal(test_translate_sql(x != y), sql("`x` <> `y`")) expect_equal(test_translate_sql(as.numeric(x)), sql("CAST(`x` AS DECIMAL(12, 9))")) expect_equal(test_translate_sql(as.numeric(x, 8)), sql("CAST(`x` AS DECIMAL(12, 8))")) - expect_equal(test_translate_sql(as.double(x)), sql("CAST(`x` AS NUMERIC)")) - expect_equal(test_translate_sql(as.character(x)), sql("CAST(`x` AS VARCHAR(MAX))")) + expect_equal(test_translate_sql(as.double(x)), sql("CAST(`x` AS FLOAT)")) + expect_equal(test_translate_sql(as.character(x)), sql("CAST(`x` AS VARCHAR(255))")) + expect_equal(test_translate_sql(as.character(x, 12)), sql("CAST(`x` AS VARCHAR(12))")) expect_equal(test_translate_sql(log(x)), sql("LN(`x`)")) expect_equal(test_translate_sql(cot(x)), sql("1 / TAN(`x`)")) expect_equal(test_translate_sql(nchar(x)), sql("CHARACTER_LENGTH(`x`)")) From c7a6d02112a188e99bd59d7a35c92a69e564f0bb Mon Sep 17 00:00:00 2001 From: Adam Black Date: Thu, 31 Oct 2024 14:36:45 +0100 Subject: [PATCH 07/17] fix `add_years()` translation on spark (#1511) --- NEWS.md | 1 + R/backend-spark-sql.R | 2 +- tests/testthat/test-backend-spark-sql.R | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/NEWS.md b/NEWS.md index fc76ea9bb..bf959d524 100644 --- a/NEWS.md +++ b/NEWS.md @@ -8,6 +8,7 @@ * `across(everything())` doesn't select grouping columns created via `.by` in `summarise()` (@mgirlich, #1493). +* `clock::add_years()` translates to correct SQL on Spark (@ablack3, #1510). * New translations of clock function `date_count_between()` for SQL server, Redshift, Snowflake, Postgres, and Spark (@edward-burn, #1495). diff --git a/R/backend-spark-sql.R b/R/backend-spark-sql.R index c551f830a..88de3f7a5 100644 --- a/R/backend-spark-sql.R +++ b/R/backend-spark-sql.R @@ -44,7 +44,7 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") }, add_years = function(x, n, ...) 
{ check_dots_empty() - sql_expr(add_months(!!!x, !!n*12)) + sql_expr(add_months(!!x, !!n*12)) }, date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { sql_expr(make_date(!!year, !!month, !!day)) diff --git a/tests/testthat/test-backend-spark-sql.R b/tests/testthat/test-backend-spark-sql.R index 5332457d3..4a005abf1 100644 --- a/tests/testthat/test-backend-spark-sql.R +++ b/tests/testthat/test-backend-spark-sql.R @@ -1,6 +1,6 @@ test_that("custom clock functions translated correctly", { local_con(simulate_spark_sql()) - expect_equal(test_translate_sql(add_years(x, 1)), sql("ADD_MONTHS('`x`', 1.0 * 12.0)")) + expect_equal(test_translate_sql(add_years(x, 1)), sql("ADD_MONTHS(`x`, 1.0 * 12.0)")) expect_equal(test_translate_sql(add_days(x, 1)), sql("DATE_ADD(`x`, 1.0)")) expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) expect_equal(test_translate_sql(date_build(2020, 1, 1)), sql("MAKE_DATE(2020.0, 1.0, 1.0)")) From d51bc0d2fe47c27df1d339b33fd56e7ce4041129 Mon Sep 17 00:00:00 2001 From: simonpcouch Date: Thu, 31 Oct 2024 08:37:20 -0500 Subject: [PATCH 08/17] move most recently merged bullet to top (#1511) --- NEWS.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index bf959d524..e08ad2d69 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,7 @@ # dbplyr (development version) +* `clock::add_years()` translates to correct SQL on Spark (@ablack3, #1510). + * Translations for `as.double()` and `as.character()` with Teradata previously raised errors and are now correct (@rplsmn, #1545). @@ -8,7 +10,6 @@ * `across(everything())` doesn't select grouping columns created via `.by` in `summarise()` (@mgirlich, #1493). -* `clock::add_years()` translates to correct SQL on Spark (@ablack3, #1510). * New translations of clock function `date_count_between()` for SQL server, Redshift, Snowflake, Postgres, and Spark (@edward-burn, #1495). From 1abf1d6995176e0f208d9ec02cbeb245cc0677d6 Mon Sep 17 00:00:00 2001 From: Ben Bolker Date: Thu, 31 Oct 2024 09:42:23 -0400 Subject: [PATCH 09/17] various small translate-sql vignette corrections (#1550) --- vignettes/translation-function.Rmd | 32 +++++++++++++++--------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/vignettes/translation-function.Rmd b/vignettes/translation-function.Rmd index 5d403d795..bbb31e314 100644 --- a/vignettes/translation-function.Rmd +++ b/vignettes/translation-function.Rmd @@ -30,7 +30,7 @@ con <- simulate_dbi() translate_sql((x + y) / 2, con = con) ``` -`translate_sql()` takes an optional `con` parameter. If not supplied, this causes dplyr to generate (approximately) SQL-92 compliant SQL. If supplied, dplyr uses `sql_translation()` to look up a custom environment which makes it possible for different databases to generate slightly different SQL: see `vignette("new-backend")` for more details. You can use the various simulate helpers to see the translations used by different backends: +`translate_sql()` takes an optional `con` parameter. If not supplied, this causes dbplyr to generate (approximately) SQL-92 compliant SQL. If supplied, dbplyr uses `sql_translation()` to look up a custom environment which makes it possible for different databases to generate slightly different SQL: see `vignette("new-backend")` for more details. 
You can use the various simulate helpers to see the translations used by different backends: ```{r} translate_sql(x ^ 2L, con = con) @@ -38,7 +38,7 @@ translate_sql(x ^ 2L, con = simulate_sqlite()) translate_sql(x ^ 2L, con = simulate_access()) ``` -Perfect translation is not possible because databases don't have all the functions that R does. The goal of dplyr is to provide a semantic rather than a literal translation: what you mean, rather than precisely what is done. In fact, even for functions that exist both in databases and R, you shouldn't expect results to be identical; database programmers have different priorities than R core programmers. For example, in R in order to get a higher level of numerical accuracy, `mean()` loops through the data twice. R's `mean()` also provides a `trim` option for computing trimmed means; this is something that databases do not provide. +Perfect translation is not possible because databases don't have all the functions that R does. The goal of dbplyr is to provide a semantic rather than a literal translation: what you mean, rather than precisely what is done. In fact, even for functions that exist both in databases and in R, you shouldn't expect results to be identical; database programmers have different priorities than R core programmers. For example, in R in order to get a higher level of numerical accuracy, `mean()` loops through the data twice. R's `mean()` also provides a `trim` option for computing trimmed means; this is something that databases do not provide. If you're interested in how `translate_sql()` is implemented, the basic techniques that underlie the implementation of `translate_sql()` are described in ["Advanced R"](https://adv-r.hadley.nz/translation.html). @@ -63,7 +63,7 @@ The following examples work through some of the basic differences between R and ``` * R and SQL have different defaults for integers and reals. - In R, 1 is a real, and 1L is an integer. In SQL, 1 is an integer, and 1.0 is a real + In R, 1 is a real, and 1L is an integer. In SQL, 1 is an integer, and 1.0 is a real. ```{r} translate_sql(1, con = con) @@ -104,7 +104,7 @@ dbplyr no longer translates `%/%` because there's no robust cross-database trans ### Aggregation -All database provide translation for the basic aggregations: `mean()`, `sum()`, `min()`, `max()`, `sd()`, `var()`. Databases automatically drop NULLs (their equivalent of missing values) whereas in R you have to ask nicely. The aggregation functions warn you about this important difference: +All databases provide translation for the basic aggregations: `mean()`, `sum()`, `min()`, `max()`, `sd()`, `var()`. Databases automatically drop NULLs (their equivalent of missing values) whereas in R you have to ask nicely. The aggregation functions warn you about this important difference: ```{r} translate_sql(mean(x), con = con) @@ -119,7 +119,7 @@ translate_sql(mean(x, na.rm = TRUE), window = FALSE, con = con) ### Conditional evaluation -`if` and `switch()` are translate to `CASE WHEN`: +`if` and `switch()` are translated to `CASE WHEN`: ```{r} translate_sql(if (x > 5) "big" else "small", con = con) @@ -135,7 +135,7 @@ translate_sql(switch(x, a = 1L, b = 2L, 3L), con = con) ## Unknown functions -Any function that dplyr doesn't know how to convert is left as is. This means that database functions that are not covered by dplyr can often be used directly via `translate_sql()`. +Any function that dbplyr doesn't know how to convert is left as is. 
This means that database functions that are not covered by dbplyr can often be used directly via `translate_sql()`. ### Prefix functions @@ -145,7 +145,7 @@ Any function that dbplyr doesn't know about will be left as is: translate_sql(foofify(x, y), con = con) ``` -Because SQL functions are general case insensitive, I recommend using upper case when you're using SQL functions in R code. That makes it easier to spot that you're doing something unusual: +Because SQL functions are generally case insensitive, I recommend using upper case when you're using SQL functions in R code. That makes it easier to spot that you're doing something unusual: ```{r} translate_sql(FOOFIFY(x, y), con = con) @@ -153,7 +153,7 @@ translate_sql(FOOFIFY(x, y), con = con) ### Infix functions -As well as prefix functions (where the name of the function comes before the arguments), dbplyr also translates infix functions. That allows you to use expressions like `LIKE` which does a limited form of pattern matching: +As well as prefix functions (where the name of the function comes before the arguments), dbplyr also translates infix functions. That allows you to use expressions like `LIKE`, which does a limited form of pattern matching: ```{r} translate_sql(x %LIKE% "%foo%", con = con) @@ -190,7 +190,7 @@ mf %>% ### Error for unknown translations -If needed, you can also force dbplyr to error if it doesn't know how to translate a function with the `dplyr.strict_sql` option: +If needed, you can also use the `dplyr.strict_sql` option to force dbplyr to error if it doesn't know how to translate a function: ```{r} #| error = TRUE @@ -245,16 +245,16 @@ Things get a little trickier with window functions, because SQL's window functio knitr::include_graphics("windows.png", dpi = 300) ``` - Of the many possible specifications, there are only three that commonly + Of the many possible specifications, only three are commonly used. They select between aggregation variants: - * Recycled: `BETWEEN UNBOUND PRECEEDING AND UNBOUND FOLLOWING` + * Recycled: `BETWEEN UNBOUND PRECEDING AND UNBOUND FOLLOWING` - * Cumulative: `BETWEEN UNBOUND PRECEEDING AND CURRENT ROW` + * Cumulative: `BETWEEN UNBOUND PRECEDING AND CURRENT ROW` - * Rolling: `BETWEEN 2 PRECEEDING AND 2 FOLLOWING` + * Rolling: `BETWEEN 2 PRECEDING AND 2 FOLLOWING` - dplyr generates the frame clause based on whether your using a recycled + dbplyr generates the frame clause based on whether you're using a recycled aggregate or a cumulative aggregate. To see how individual window functions are translated to SQL, we can again use `translate_sql()`: @@ -266,14 +266,14 @@ translate_sql(ntile(G, 2), con = con) translate_sql(lag(G), con = con) ``` -If the tbl has been grouped or arranged previously in the pipeline, then dplyr will use that information to set the "partition by" and "order by" clauses. For interactive exploration, you can achieve the same effect by setting the `vars_group` and `vars_order` arguments to `translate_sql()` +If the tbl has been grouped or arranged previously in the pipeline, then dplyr will use that information to set the "partition by" and "order by" clauses. 
For interactive exploration, you can achieve the same effect by setting the `vars_group` and `vars_order` arguments to `translate_sql()`: ```{r} translate_sql(cummean(G), vars_order = "year", con = con) translate_sql(rank(), vars_group = "ID", con = con) ``` -There are some challenges when translating window functions between R and SQL, because dplyr tries to keep the window functions as similar as possible to both the existing R analogues and to the SQL functions. This means that there are three ways to control the order clause depending on which window function you're using: +There are some challenges when translating window functions between R and SQL, because dbplyr tries to keep the window functions as similar as possible to both the existing R analogues and to the SQL functions. This means that there are three ways to control the order clause depending on which window function you're using: * For ranking functions, the ordering variable is the first argument: `rank(x)`, `ntile(y, 2)`. If omitted or `NULL`, will use the default ordering associated From ad8a9d9f4f2e3791deb3beb4badc569f1496b9b0 Mon Sep 17 00:00:00 2001 From: "Simon P. Couch" Date: Thu, 31 Oct 2024 09:41:25 -0500 Subject: [PATCH 10/17] transition `expect_error()` for snowflake backend (#1547) * transition `expect_error()` - ...to snapshot when testing the error message - ...to classed error when testing an error from another package or an error thrown from a widely-used helper - ...to `expect_no_error()` (or no test) when `regex = NA` * Analogous story for `expect_warning()` and `expect_message()`. --- R/utils-check.R | 2 +- tests/testthat/_snaps/backend-snowflake.md | 25 +++++++++++++++++++ tests/testthat/_snaps/rows.md | 2 +- .../_snaps/translate-sql-conditional.md | 6 ++--- tests/testthat/_snaps/verb-pivot-wider.md | 2 +- tests/testthat/test-backend-snowflake.R | 22 ++++++++++++---- 6 files changed, 48 insertions(+), 11 deletions(-) diff --git a/R/utils-check.R b/R/utils-check.R index edf407936..e3558b211 100644 --- a/R/utils-check.R +++ b/R/utils-check.R @@ -144,7 +144,7 @@ check_unsupported_arg <- function(x, msg <- c(msg, i = allow_msg) } - cli_abort(msg, call = call) + cli_abort(msg, call = call, class = "dbplyr_error_unsupported_arg") } stop_unsupported_function <- function(f, ..., with = NULL, call = caller_env()) { diff --git a/tests/testthat/_snaps/backend-snowflake.md b/tests/testthat/_snaps/backend-snowflake.md index 2eef02c36..1c3ed964a 100644 --- a/tests/testthat/_snaps/backend-snowflake.md +++ b/tests/testthat/_snaps/backend-snowflake.md @@ -1,3 +1,28 @@ +# pasting translated correctly + + Code + test_translate_sql(paste0(x, collapse = "")) + Condition + Error in `check_collapse()`: + ! `collapse` not supported in DB translation of `paste()`. + i Please use `str_flatten()` instead. + +# difftime is translated correctly + + Code + test_translate_sql(difftime(start_date, end_date, units = "auto")) + Condition + Error in `difftime()`: + ! The only supported value for `units` on SQL backends is "days" + +--- + + Code + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + Condition + Error in `difftime()`: + ! The `tz` argument is not supported for SQL backends. 
+ # pmin() and pmax() respect na.rm Code diff --git a/tests/testthat/_snaps/rows.md b/tests/testthat/_snaps/rows.md index 5e32ce606..d5f04fa8a 100644 --- a/tests/testthat/_snaps/rows.md +++ b/tests/testthat/_snaps/rows.md @@ -55,7 +55,7 @@ (expect_error(rows_insert(lf, lf, by = "x", conflict = "error", in_place = FALSE)) ) Output - + Error in `rows_insert()`: ! `conflict = "error"` isn't supported on database backends. i It must be "ignore" instead. diff --git a/tests/testthat/_snaps/translate-sql-conditional.md b/tests/testthat/_snaps/translate-sql-conditional.md index fdf87e0e9..f72cf8aee 100644 --- a/tests/testthat/_snaps/translate-sql-conditional.md +++ b/tests/testthat/_snaps/translate-sql-conditional.md @@ -48,13 +48,13 @@ (expect_error(test_translate_sql(case_when(x == 1L ~ "yes", .ptype = character()))) ) Output - + Error in `case_when()`: ! Argument `.ptype` isn't supported on database backends. Code (expect_error(test_translate_sql(case_when(x == 1L ~ "yes", .size = 1)))) Output - + Error in `case_when()`: ! Argument `.size` isn't supported on database backends. @@ -122,7 +122,7 @@ Code (expect_error(test_translate_sql(case_match(x, 1 ~ 1, .ptype = integer())))) Output - + Error in `case_match()`: ! Argument `.ptype` isn't supported on database backends. diff --git a/tests/testthat/_snaps/verb-pivot-wider.md b/tests/testthat/_snaps/verb-pivot-wider.md index dc138fc92..48844e74a 100644 --- a/tests/testthat/_snaps/verb-pivot-wider.md +++ b/tests/testthat/_snaps/verb-pivot-wider.md @@ -169,7 +169,7 @@ Code (expect_error(tidyr::pivot_wider(df, id_expand = TRUE))) Output - + Error in `tidyr::pivot_wider()`: ! `id_expand = TRUE` isn't supported on database backends. i It must be FALSE instead. diff --git a/tests/testthat/test-backend-snowflake.R b/tests/testthat/test-backend-snowflake.R index 208532291..a865587cf 100644 --- a/tests/testthat/test-backend-snowflake.R +++ b/tests/testthat/test-backend-snowflake.R @@ -14,7 +14,7 @@ test_that("pasting translated correctly", { expect_equal(test_translate_sql(str_c(x, y)), sql("CONCAT_WS('', `x`, `y`)")) expect_equal(test_translate_sql(str_c(x, y, sep = "|")), sql("CONCAT_WS('|', `x`, `y`)")) - expect_error(test_translate_sql(paste0(x, collapse = "")), "`collapse` not supported") + expect_snapshot(error = TRUE, test_translate_sql(paste0(x, collapse = ""))) expect_equal(test_translate_sql(str_flatten(x), window = TRUE), sql("LISTAGG(`x`, '') OVER ()")) expect_equal(test_translate_sql(str_flatten(x, collapse = "|"), window = TRUE), sql("LISTAGG(`x`, '|') OVER ()")) @@ -87,7 +87,10 @@ test_that("custom lubridate functions translated correctly", { )) expect_equal(test_translate_sql(quarter(x)), sql("EXTRACT('quarter', `x`)")) expect_equal(test_translate_sql(quarter(x, with_year = TRUE)), sql("(EXTRACT('year', `x`) || '.' 
|| EXTRACT('quarter', `x`))")) - expect_error(test_translate_sql(quarter(x, fiscal_start = 2))) + expect_error( + test_translate_sql(quarter(x, fiscal_start = 2)), + class = "dbplyr_error_unsupported_arg" + ) expect_equal(test_translate_sql(isoyear(x)), sql("EXTRACT('year', `x`)")) expect_equal(test_translate_sql(seconds(x)), sql("INTERVAL '`x` second'")) @@ -106,7 +109,10 @@ test_that("custom clock functions translated correctly", { local_con(simulate_snowflake()) expect_equal(test_translate_sql(add_years(x, 1)), sql("DATEADD(YEAR, 1.0, `x`)")) expect_equal(test_translate_sql(add_days(x, 1)), sql("DATEADD(DAY, 1.0, `x`)")) - expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) + expect_error( + test_translate_sql(add_days(x, 1, "dots", "must", "be empty")), + class = "rlib_error_dots_nonempty" + ) expect_equal(test_translate_sql(date_build(2020, 1, 1)), sql("DATE_FROM_PARTS(2020.0, 1.0, 1.0)")) expect_equal(test_translate_sql(date_build(year_column, 1L, 1L)), sql("DATE_FROM_PARTS(`year_column`, 1, 1)")) expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART(YEAR, `date_column`)")) @@ -123,8 +129,14 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) - expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) - expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, units = "auto")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + ) }) test_that("min() and max()", { From d0994d1a3506d742c5acd5ae7bad814191fb03ef Mon Sep 17 00:00:00 2001 From: "Simon P. Couch" Date: Thu, 31 Oct 2024 16:35:18 -0500 Subject: [PATCH 11/17] audit calls to `expect_error()` for `backend-*.R` files (#1553) * audit `expect_error()` for `backend-.R` * audit `expect_error()` for `backend-access.R` * audit `expect_error()` for `backend-mssql.R` * audit `expect_error()` for `backend-oracle.R` * audit `expect_error()` for `backend-postgres.R` * audit `expect_error()` for `backend-redshift.R` * audit `expect_error()` for `backend-spark-sql.R` also, subclass widely-used error. 
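
The same transition is applied in each of the files listed above. A condensed before/after, drawn from the access and mssql hunks below (the helpers `local_con()` and `test_translate_sql()` and the `dbplyr_error_unsupported_fn` subclass already exist in dbplyr's test setup and in R/translate-sql-helpers.R; nothing here is new API):

```r
# Before: bare or message-matching expectations
expect_error(test_translate_sql(cor(x), window = FALSE), "not available")
expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto")))

# After: assert the condition class when that is what the test cares about ...
expect_error(
  test_translate_sql(cor(x), window = FALSE),
  class = "dbplyr_error_unsupported_fn"
)

# ... and snapshot the full message when the wording itself is under test
expect_snapshot(
  error = TRUE,
  test_translate_sql(difftime(start_date, end_date, units = "auto"))
)
```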
--- R/translate-sql-helpers.R | 5 +- tests/testthat/_snaps/backend-.md | 8 +++ tests/testthat/_snaps/backend-access.md | 9 +++ tests/testthat/_snaps/backend-mssql.md | 41 ++++++++++++ tests/testthat/_snaps/backend-oracle.md | 16 +++++ tests/testthat/_snaps/backend-postgres.md | 64 +++++++++++++++++-- tests/testthat/_snaps/backend-redshift.md | 32 ++++++++++ tests/testthat/_snaps/backend-spark-sql.md | 32 ++++++++++ tests/testthat/_snaps/backend-sqlite.md | 17 ----- .../testthat/_snaps/translate-sql-helpers.md | 8 +++ tests/testthat/test-backend-.R | 2 +- tests/testthat/test-backend-access.R | 17 +++-- tests/testthat/test-backend-mssql.R | 47 +++++++++++--- tests/testthat/test-backend-oracle.R | 15 ++++- tests/testthat/test-backend-postgres.R | 56 ++++++++++------ tests/testthat/test-backend-redshift.R | 33 +++++++--- tests/testthat/test-backend-spark-sql.R | 25 ++++++-- tests/testthat/test-backend-sqlite.R | 13 ++-- tests/testthat/test-translate-sql-helpers.R | 4 ++ 19 files changed, 364 insertions(+), 80 deletions(-) create mode 100644 tests/testthat/_snaps/backend-spark-sql.md diff --git a/R/translate-sql-helpers.R b/R/translate-sql-helpers.R index a26e7c4f7..c9c120e60 100644 --- a/R/translate-sql-helpers.R +++ b/R/translate-sql-helpers.R @@ -273,7 +273,10 @@ sql_not_supported <- function(f) { check_string(f) function(...) { - cli_abort("{.fun {f}} is not available in this SQL variant.") + cli_abort( + "{.fun {f}} is not available in this SQL variant.", + class = "dbplyr_error_unsupported_fn" + ) } } diff --git a/tests/testthat/_snaps/backend-.md b/tests/testthat/_snaps/backend-.md index b2e003bc5..aab4ac1e7 100644 --- a/tests/testthat/_snaps/backend-.md +++ b/tests/testthat/_snaps/backend-.md @@ -1,3 +1,11 @@ +# basic arithmetic is correct + + Code + test_translate_sql(100L %/% 3L) + Condition + Error in `100L %/% 3L`: + ! `%/%()` is not available in this SQL variant. + # can translate subsetting Code diff --git a/tests/testthat/_snaps/backend-access.md b/tests/testthat/_snaps/backend-access.md index a25020126..534240bbe 100644 --- a/tests/testthat/_snaps/backend-access.md +++ b/tests/testthat/_snaps/backend-access.md @@ -1,3 +1,12 @@ +# custom scalar translated correctly + + Code + test_translate_sql(paste(x, collapse = "-")) + Condition + Error in `check_collapse()`: + ! `collapse` not supported in DB translation of `paste()`. + i Please use `str_flatten()` instead. + # queries translate correctly Code diff --git a/tests/testthat/_snaps/backend-mssql.md b/tests/testthat/_snaps/backend-mssql.md index f17f8175c..36d772c8d 100644 --- a/tests/testthat/_snaps/backend-mssql.md +++ b/tests/testthat/_snaps/backend-mssql.md @@ -32,6 +32,47 @@ ! `abbr = TRUE` isn't supported in SQL Server translation. i It must be FALSE instead. +--- + + Code + test_translate_sql(quarter(x, fiscal_start = 5)) + Condition + Error in `quarter()`: + ! `fiscal_start = 5` isn't supported in SQL Server translation. + i It must be 1 instead. + +# custom clock functions translated correctly + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + Condition + Error in `date_count_between()`: + ! `precision` must be "day" on SQL backends. + +--- + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + Condition + Error in `date_count_between()`: + ! `n` must be "1" on SQL backends. + +# difftime is translated correctly + + Code + test_translate_sql(difftime(start_date, end_date, units = "auto")) + Condition + Error in `difftime()`: + ! 
The only supported value for `units` on SQL backends is "days" + +--- + + Code + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + Condition + Error in `difftime()`: + ! The `tz` argument is not supported for SQL backends. + # convert between bit and boolean as needed Code diff --git a/tests/testthat/_snaps/backend-oracle.md b/tests/testthat/_snaps/backend-oracle.md index aa17cd6b4..a5d249c6f 100644 --- a/tests/testthat/_snaps/backend-oracle.md +++ b/tests/testthat/_snaps/backend-oracle.md @@ -137,3 +137,19 @@ SELECT 1, '{1,2,3}' FROM DUAL ) `values_table` +# difftime is translated correctly + + Code + test_translate_sql(difftime(start_date, end_date, units = "auto")) + Condition + Error in `difftime()`: + ! The only supported value for `units` on SQL backends is "days" + +--- + + Code + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + Condition + Error in `difftime()`: + ! The `tz` argument is not supported for SQL backends. + diff --git a/tests/testthat/_snaps/backend-postgres.md b/tests/testthat/_snaps/backend-postgres.md index b6c98be73..326582bfc 100644 --- a/tests/testthat/_snaps/backend-postgres.md +++ b/tests/testthat/_snaps/backend-postgres.md @@ -1,18 +1,68 @@ +# pasting translated correctly + + Code + test_translate_sql(paste0(x, collapse = ""), window = FALSE) + Condition + Error in `check_collapse()`: + ! `collapse` not supported in DB translation of `paste()`. + i Please use `str_flatten()` instead. + +# custom lubridate functions translated correctly + + Code + test_translate_sql(quarter(x, fiscal_start = 2)) + Condition + Error in `quarter()`: + ! `fiscal_start = 2` isn't supported in PostgreSQL translation. + i It must be 1 instead. + +# custom clock functions translated correctly + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + Condition + Error in `date_count_between()`: + ! `precision` must be "day" on SQL backends. + +--- + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + Condition + Error in `date_count_between()`: + ! `n` must be "1" on SQL backends. + +# difftime is translated correctly + + Code + test_translate_sql(difftime(start_date, end_date, units = "auto")) + Condition + Error in `difftime()`: + ! The only supported value for `units` on SQL backends is "days" + +--- + + Code + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + Condition + Error in `difftime()`: + ! The `tz` argument is not supported for SQL backends. + # custom window functions translated correctly Code - (expect_error(test_translate_sql(quantile(x, 0.3, na.rm = TRUE), window = TRUE)) - ) - Output - + test_translate_sql(quantile(x, 0.3, na.rm = TRUE), window = TRUE) + Condition Error in `quantile()`: ! Translation of `quantile()` in `mutate()` is not supported for PostgreSQL. i Use a combination of `summarise()` and `left_join()` instead: `df %>% left_join(summarise( = quantile(x, 0.3, na.rm = TRUE)))`. + +--- + Code - (expect_error(test_translate_sql(median(x, na.rm = TRUE), window = TRUE))) - Output - + test_translate_sql(median(x, na.rm = TRUE), window = TRUE) + Condition Error in `median()`: ! Translation of `median()` in `mutate()` is not supported for PostgreSQL. 
i Use a combination of `summarise()` and `left_join()` instead: diff --git a/tests/testthat/_snaps/backend-redshift.md b/tests/testthat/_snaps/backend-redshift.md index e41a1943c..b85f9ec96 100644 --- a/tests/testthat/_snaps/backend-redshift.md +++ b/tests/testthat/_snaps/backend-redshift.md @@ -35,3 +35,35 @@ SELECT 1, '{1,2,3}' ) AS `values_table` +# custom clock functions translated correctly + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + Condition + Error in `date_count_between()`: + ! `precision` must be "day" on SQL backends. + +--- + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + Condition + Error in `date_count_between()`: + ! `n` must be "1" on SQL backends. + +# difftime is translated correctly + + Code + test_translate_sql(difftime(start_date, end_date, units = "auto")) + Condition + Error in `difftime()`: + ! The only supported value for `units` on SQL backends is "days" + +--- + + Code + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + Condition + Error in `difftime()`: + ! The `tz` argument is not supported for SQL backends. + diff --git a/tests/testthat/_snaps/backend-spark-sql.md b/tests/testthat/_snaps/backend-spark-sql.md new file mode 100644 index 000000000..cc9dc554e --- /dev/null +++ b/tests/testthat/_snaps/backend-spark-sql.md @@ -0,0 +1,32 @@ +# custom clock functions translated correctly + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + Condition + Error in `date_count_between()`: + ! `precision` must be "day" on SQL backends. + +--- + + Code + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + Condition + Error in `date_count_between()`: + ! `n` must be "1" on SQL backends. + +# difftime is translated correctly + + Code + test_translate_sql(difftime(start_date, end_date, units = "auto")) + Condition + Error in `difftime()`: + ! The only supported value for `units` on SQL backends is "days" + +--- + + Code + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + Condition + Error in `difftime()`: + ! The `tz` argument is not supported for SQL backends. + diff --git a/tests/testthat/_snaps/backend-sqlite.md b/tests/testthat/_snaps/backend-sqlite.md index 5090bb042..0b0d3a20a 100644 --- a/tests/testthat/_snaps/backend-sqlite.md +++ b/tests/testthat/_snaps/backend-sqlite.md @@ -1,20 +1,3 @@ -# custom aggregates translated - - Code - (expect_error(test_translate_sql(quantile(x, 0.5, na.rm = TRUE), window = FALSE)) - ) - Output - - Error in `quantile()`: - ! `quantile()` is not available in this SQL variant. - Code - (expect_error(test_translate_sql(quantile(x, 0.5, na.rm = TRUE), window = TRUE)) - ) - Output - - Error in `quantile()`: - ! `quantile()` is not available in this SQL variant. - # custom SQL translation Code diff --git a/tests/testthat/_snaps/translate-sql-helpers.md b/tests/testthat/_snaps/translate-sql-helpers.md index 2e79fd7f2..a2ae044fe 100644 --- a/tests/testthat/_snaps/translate-sql-helpers.md +++ b/tests/testthat/_snaps/translate-sql-helpers.md @@ -1,3 +1,11 @@ +# warns informatively with unsupported function + + Code + sql_not_supported("cor")() + Condition + Error: + ! `cor()` is not available in this SQL variant. 
+ # duplicates throw an error Code diff --git a/tests/testthat/test-backend-.R b/tests/testthat/test-backend-.R index d04ec01db..6b284a44e 100644 --- a/tests/testthat/test-backend-.R +++ b/tests/testthat/test-backend-.R @@ -15,7 +15,7 @@ test_that("basic arithmetic is correct", { expect_equal(test_translate_sql(5 ^ 2), sql("POWER(5.0, 2.0)")) expect_equal(test_translate_sql(100L %% 3L), sql("100 % 3")) - expect_error(test_translate_sql(100L %/% 3L), "not available") + expect_snapshot(error = TRUE, test_translate_sql(100L %/% 3L)) }) test_that("small numbers aren't converted to 0", { diff --git a/tests/testthat/test-backend-access.R b/tests/testthat/test-backend-access.R index a37a29fa0..033b0abb3 100644 --- a/tests/testthat/test-backend-access.R +++ b/tests/testthat/test-backend-access.R @@ -34,7 +34,7 @@ test_that("custom scalar translated correctly", { # paste() expect_equal(test_translate_sql(paste(x, y, sep = "+")), sql("`x` & '+' & `y`")) expect_equal(test_translate_sql(paste0(x, y)), sql("`x` & `y`")) - expect_error(test_translate_sql(paste(x, collapse = "-")),"`collapse` not supported") + expect_snapshot(error = TRUE, test_translate_sql(paste(x, collapse = "-"))) # Logic expect_equal(test_translate_sql(ifelse(x, "true", "false")), sql("IIF(`x`, 'true', 'false')")) }) @@ -45,9 +45,18 @@ test_that("custom aggregators translated correctly", { expect_equal(test_translate_sql(sd(x, na.rm = TRUE), window = FALSE), sql("STDEV(`x`)")) expect_equal(test_translate_sql(var(x, na.rm = TRUE), window = FALSE), sql("VAR(`x`)")) - expect_error(test_translate_sql(cor(x), window = FALSE), "not available") - expect_error(test_translate_sql(cov(x), window = FALSE), "not available") - expect_error(test_translate_sql(n_distinct(x), window = FALSE), "not available") + expect_error( + test_translate_sql(cor(x), window = FALSE), + class = "dbplyr_error_unsupported_fn" + ) + expect_error( + test_translate_sql(cov(x), window = FALSE), + class = "dbplyr_error_unsupported_fn" + ) + expect_error( + test_translate_sql(n_distinct(x), window = FALSE), + class = "dbplyr_error_unsupported_fn" + ) }) test_that("custom escaping works as expected", { diff --git a/tests/testthat/test-backend-mssql.R b/tests/testthat/test-backend-mssql.R index 682ca1b1a..3410d5502 100644 --- a/tests/testthat/test-backend-mssql.R +++ b/tests/testthat/test-backend-mssql.R @@ -34,8 +34,14 @@ test_that("custom scalar translated correctly", { sql("IIF(`x`, 'true', 'false')") ) - expect_error(test_translate_sql(bitwShiftL(x, 2L)), sql("not available")) - expect_error(test_translate_sql(bitwShiftR(x, 2L)), sql("not available")) + expect_error( + test_translate_sql(bitwShiftL(x, 2L)), + class = "dbplyr_error_unsupported_fn" + ) + expect_error( + test_translate_sql(bitwShiftR(x, 2L)), + class = "dbplyr_error_unsupported_fn" + ) }) test_that("contents of [ have bool context", { @@ -57,8 +63,14 @@ test_that("custom aggregators translated correctly", { expect_equal(test_translate_sql(sd(x, na.rm = TRUE), window = FALSE), sql("STDEV(`x`)")) expect_equal(test_translate_sql(var(x, na.rm = TRUE), window = FALSE), sql("VAR(`x`)")) - expect_error(test_translate_sql(cor(x), window = FALSE), "not available") - expect_error(test_translate_sql(cov(x), window = FALSE), "not available") + expect_error( + test_translate_sql(cor(x), window = FALSE), + class = "dbplyr_error_unsupported_fn" + ) + expect_error( + test_translate_sql(cov(x), window = FALSE), + class = "dbplyr_error_unsupported_fn" + ) expect_equal(test_translate_sql(str_flatten(x), window = FALSE), 
sql("STRING_AGG(`x`, '')")) expect_snapshot(error = TRUE, { @@ -126,14 +138,17 @@ test_that("custom lubridate functions translated correctly", { expect_equal(test_translate_sql(quarter(x)), sql("DATEPART(QUARTER, `x`)")) expect_equal(test_translate_sql(quarter(x, with_year = TRUE)), sql("(DATENAME(YEAR, `x`) + '.' + DATENAME(QUARTER, `x`))")) - expect_error(test_translate_sql(quarter(x, fiscal_start = 5))) + expect_snapshot(error = TRUE, test_translate_sql(quarter(x, fiscal_start = 5))) }) test_that("custom clock functions translated correctly", { local_con(simulate_mssql()) expect_equal(test_translate_sql(add_years(x, 1)), sql("DATEADD(YEAR, 1.0, `x`)")) expect_equal(test_translate_sql(add_days(x, 1)), sql("DATEADD(DAY, 1.0, `x`)")) - expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) + expect_error( + test_translate_sql(add_days(x, 1, "dots", "must", "be empty")), + class = "rlib_error_dots_nonempty" + ) expect_equal(test_translate_sql(date_build(2020, 1, 1)), sql("DATEFROMPARTS(2020.0, 1.0, 1.0)")) expect_equal(test_translate_sql(date_build(year_column, 1L, 1L)), sql("DATEFROMPARTS(`year_column`, 1, 1)")) expect_equal(test_translate_sql(get_year(date_column)), sql("DATEPART(YEAR, `date_column`)")) @@ -141,8 +156,14 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_day(date_column)), sql("DATEPART(DAY, `date_column`)")) expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), sql("DATEDIFF(DAY, `date_column_1`, `date_column_2`)")) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + ) }) test_that("difftime is translated correctly", { @@ -150,8 +171,14 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) - expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) - expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, units = "auto")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + ) }) test_that("last_value_sql() translated correctly", { diff --git a/tests/testthat/test-backend-oracle.R b/tests/testthat/test-backend-oracle.R index 1ab5462c6..fa3c26928 100644 --- a/tests/testthat/test-backend-oracle.R +++ b/tests/testthat/test-backend-oracle.R @@ -87,7 +87,10 @@ test_that("custom clock functions translated correctly", { local_con(simulate_oracle()) expect_equal(test_translate_sql(add_years(x, 1)), sql("(`x` + NUMTODSINTERVAL(1.0 * 365.25, 'day'))")) expect_equal(test_translate_sql(add_days(x, 1)), sql("(`x` + NUMTODSINTERVAL(1.0, 'day'))")) - expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) + expect_error( + test_translate_sql(add_days(x, 1, "dots", "must", "be empty")), + class = "rlib_error_dots_nonempty" + ) }) 
test_that("difftime is translated correctly", { @@ -95,6 +98,12 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("CEIL(CAST(`end_date` AS DATE) - CAST(`start_date` AS DATE))")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("CEIL(CAST(`end_date` AS DATE) - CAST(`start_date` AS DATE))")) - expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) - expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, units = "auto")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + ) }) diff --git a/tests/testthat/test-backend-postgres.R b/tests/testthat/test-backend-postgres.R index 5ae2497ae..b00403d26 100644 --- a/tests/testthat/test-backend-postgres.R +++ b/tests/testthat/test-backend-postgres.R @@ -52,7 +52,10 @@ test_that("pasting translated correctly", { expect_equal(test_translate_sql(paste(x, y), window = FALSE), sql("CONCAT_WS(' ', `x`, `y`)")) expect_equal(test_translate_sql(paste0(x, y), window = FALSE), sql("CONCAT_WS('', `x`, `y`)")) - expect_error(test_translate_sql(paste0(x, collapse = ""), window = FALSE), "`collapse` not supported") + expect_snapshot( + error = TRUE, + test_translate_sql(paste0(x, collapse = ""), window = FALSE) + ) }) test_that("postgres mimics two argument log", { @@ -73,7 +76,7 @@ test_that("custom lubridate functions translated correctly", { expect_equal(test_translate_sql(isoweek(x)), sql("EXTRACT(WEEK FROM `x`)")) expect_equal(test_translate_sql(quarter(x)), sql("EXTRACT(QUARTER FROM `x`)")) expect_equal(test_translate_sql(quarter(x, with_year = TRUE)), sql("(EXTRACT(YEAR FROM `x`) || '.' 
|| EXTRACT(QUARTER FROM `x`))")) - expect_error(test_translate_sql(quarter(x, fiscal_start = 2))) + expect_snapshot(error = TRUE, test_translate_sql(quarter(x, fiscal_start = 2))) expect_equal(test_translate_sql(isoyear(x)), sql("EXTRACT(YEAR FROM `x`)")) expect_equal(test_translate_sql(seconds(x)), sql("CAST('`x` seconds' AS INTERVAL)")) @@ -92,7 +95,10 @@ test_that("custom clock functions translated correctly", { local_con(simulate_postgres()) expect_equal(test_translate_sql(add_years(x, 1)), sql("(`x` + 1.0*INTERVAL'1 year')")) expect_equal(test_translate_sql(add_days(x, 1)), sql("(`x` + 1.0*INTERVAL'1 day')")) - expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) + expect_error( + test_translate_sql(add_days(x, 1, "dots", "must", "be empty")), + class = "rlib_error_dots_nonempty" + ) expect_equal(test_translate_sql(date_build(2020, 1, 1)), sql("MAKE_DATE(2020.0, 1.0, 1.0)")) expect_equal(test_translate_sql(date_build(year_column, 1L, 1L)), sql("MAKE_DATE(`year_column`, 1, 1)")) expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART('year', `date_column`)")) @@ -100,8 +106,14 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART('day', `date_column`)")) expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), sql("`date_column_2` - `date_column_1`")) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + ) }) test_that("difftime is translated correctly", { @@ -109,17 +121,27 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("(CAST(`start_date` AS DATE) - CAST(`end_date` AS DATE))")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("(CAST(`start_date` AS DATE) - CAST(`end_date` AS DATE))")) - expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) - expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, units = "auto")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + ) }) test_that("custom window functions translated correctly", { local_con(simulate_postgres()) - expect_snapshot({ - (expect_error(test_translate_sql(quantile(x, 0.3, na.rm = TRUE), window = TRUE))) - (expect_error(test_translate_sql(median(x, na.rm = TRUE), window = TRUE))) - }) + expect_snapshot( + error = TRUE, + test_translate_sql(quantile(x, 0.3, na.rm = TRUE), window = TRUE) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(median(x, na.rm = TRUE), window = TRUE) + ) }) test_that("custom SQL translation", { @@ -202,7 +224,7 @@ test_that("can overwrite temp tables", { src <- src_test("postgres") copy_to(src, mtcars, "mtcars", overwrite = TRUE) withr::defer(DBI::dbRemoveTable(src, "mtcars")) - expect_error(copy_to(src, mtcars, "mtcars", overwrite = TRUE), NA) + expect_no_error(copy_to(src, mtcars, "mtcars", overwrite = TRUE)) }) test_that("copy_inline works", { @@ 
-241,7 +263,7 @@ test_that("can insert with returning", { ) }, transform = snap_transform_dbi) - expect_error( + expect_no_error( rows_insert( x, y, by = c("a", "b"), @@ -249,8 +271,7 @@ test_that("can insert with returning", { conflict = "ignore", returning = everything(), method = "where_not_exists" - ), - NA + ) ) x <- local_db_table(con, df_x, "df_x2", overwrite = TRUE) @@ -372,15 +393,14 @@ test_that("can upsert with returning", { }, transform = snap_transform_dbi) # DBI method does not need a unique index - expect_error( + expect_no_error( rows_upsert( x, y, by = c("a", "b"), in_place = TRUE, returning = everything(), method = "cte_update" - ), - NA + ) ) x <- local_db_table(con, df_x, "df_x2") diff --git a/tests/testthat/test-backend-redshift.R b/tests/testthat/test-backend-redshift.R index 3ee191d8f..a879d46ff 100644 --- a/tests/testthat/test-backend-redshift.R +++ b/tests/testthat/test-backend-redshift.R @@ -6,7 +6,10 @@ test_that("defaults to postgres translations", { test_that("string translations", { local_con(simulate_redshift()) - expect_error(test_translate_sql(str_replace("xx", ".", "a")), "not available") + expect_error( + test_translate_sql(str_replace("xx", ".", "a")), + class = "dbplyr_error_unsupported_fn" + ) expect_equal(test_translate_sql(str_replace_all("xx", ".", "a")), sql("REGEXP_REPLACE('xx', '.', 'a')")) expect_equal(test_translate_sql(substr(x, 2, 2)), sql("SUBSTRING(`x`, 2, 1)")) @@ -38,9 +41,6 @@ test_that("lag and lead translation", { expect_equal(test_translate_sql(lead(x)), sql("LEAD(`x`, 1) OVER ()")) expect_equal(test_translate_sql(lag(x)), sql("LAG(`x`, 1) OVER ()")) - - expect_error(test_translate_sql(lead(x, default = y)), "unused argument") - expect_error(test_translate_sql(lag(x, default = y)), "unused argument") }) test_that("copy_inline uses UNION ALL", { @@ -62,7 +62,10 @@ test_that("custom clock functions translated correctly", { local_con(simulate_redshift()) expect_equal(test_translate_sql(add_years(x, 1)), sql("DATEADD(YEAR, 1.0, `x`)")) expect_equal(test_translate_sql(add_days(x, 1)), sql("DATEADD(DAY, 1.0, `x`)")) - expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) + expect_error( + test_translate_sql(add_days(x, 1, "dots", "must", "be empty")), + class = "rlib_error_dots_nonempty" + ) expect_equal(test_translate_sql(date_build(2020, 1, 1)), sql("TO_DATE(CAST(2020.0 AS TEXT) || '-' CAST(1.0 AS TEXT) || '-' || CAST(1.0 AS TEXT)), 'YYYY-MM-DD')")) expect_equal(test_translate_sql(date_build(year_column, 1L, 1L)), sql("TO_DATE(CAST(`year_column` AS TEXT) || '-' CAST(1 AS TEXT) || '-' || CAST(1 AS TEXT)), 'YYYY-MM-DD')")) expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART('year', `date_column`)")) @@ -70,8 +73,14 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART('day', `date_column`)")) expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), sql("DATEDIFF(DAY, `date_column_1`, `date_column_2`)")) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + ) }) test_that("difftime is translated correctly", { @@ -79,6 
+88,12 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) - expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) - expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, units = "auto")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + ) }) diff --git a/tests/testthat/test-backend-spark-sql.R b/tests/testthat/test-backend-spark-sql.R index 4a005abf1..cf5ae970e 100644 --- a/tests/testthat/test-backend-spark-sql.R +++ b/tests/testthat/test-backend-spark-sql.R @@ -2,7 +2,10 @@ test_that("custom clock functions translated correctly", { local_con(simulate_spark_sql()) expect_equal(test_translate_sql(add_years(x, 1)), sql("ADD_MONTHS(`x`, 1.0 * 12.0)")) expect_equal(test_translate_sql(add_days(x, 1)), sql("DATE_ADD(`x`, 1.0)")) - expect_error(test_translate_sql(add_days(x, 1, "dots", "must", "be empty"))) + expect_error( + test_translate_sql(add_days(x, 1, "dots", "must", "be empty")), + class = "rlib_error_dots_nonempty" + ) expect_equal(test_translate_sql(date_build(2020, 1, 1)), sql("MAKE_DATE(2020.0, 1.0, 1.0)")) expect_equal(test_translate_sql(date_build(year_column, 1L, 1L)), sql("MAKE_DATE(`year_column`, 1, 1)")) expect_equal(test_translate_sql(get_year(date_column)), sql("DATE_PART('YEAR', `date_column`)")) @@ -10,8 +13,14 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART('DAY', `date_column`)")) expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), sql("DATEDIFF(`date_column_2`, `date_column_1`)")) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) + ) }) test_that("difftime is translated correctly", { @@ -19,6 +28,12 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(`end_date`, `start_date`)")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(`end_date`, `start_date`)")) - expect_error(test_translate_sql(difftime(start_date, end_date, units = "auto"))) - expect_error(test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days"))) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, units = "auto")) + ) + expect_snapshot( + error = TRUE, + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + ) }) diff --git a/tests/testthat/test-backend-sqlite.R b/tests/testthat/test-backend-sqlite.R index 0422ae955..e41af3996 100644 --- a/tests/testthat/test-backend-sqlite.R +++ b/tests/testthat/test-backend-sqlite.R @@ -37,11 +37,14 @@ test_that("custom aggregates translated", { expect_equal(test_translate_sql(median(x, na.rm = TRUE), window = FALSE), 
sql('MEDIAN(`x`)')) expect_equal(test_translate_sql(sd(x, na.rm = TRUE), window = FALSE), sql('STDEV(`x`)')) - - expect_snapshot({ - (expect_error(test_translate_sql(quantile(x, 0.5, na.rm = TRUE), window = FALSE))) - (expect_error(test_translate_sql(quantile(x, 0.5, na.rm = TRUE), window = TRUE))) - }) + expect_error( + test_translate_sql(quantile(x, 0.5, na.rm = TRUE), window = FALSE), + class = "dbplyr_error_unsupported_fn" + ) + expect_error( + test_translate_sql(quantile(x, 0.5, na.rm = TRUE), window = TRUE), + class = "dbplyr_error_unsupported_fn" + ) }) test_that("custom SQL translation", { diff --git a/tests/testthat/test-translate-sql-helpers.R b/tests/testthat/test-translate-sql-helpers.R index a6e8f1bae..76d5414ce 100644 --- a/tests/testthat/test-translate-sql-helpers.R +++ b/tests/testthat/test-translate-sql-helpers.R @@ -10,6 +10,10 @@ test_that("aggregation functions warn once if na.rm = FALSE", { expect_warning(sql_mean("x"), NA) }) +test_that("warns informatively with unsupported function", { + expect_snapshot(error = TRUE, sql_not_supported("cor")()) +}) + test_that("missing window functions create a warning", { local_con(simulate_dbi()) sim_scalar <- sql_translator() From b1e8bab229aea5ab77557701ee4bbe3d41094bc5 Mon Sep 17 00:00:00 2001 From: "Simon P. Couch" Date: Fri, 1 Nov 2024 08:43:24 -0500 Subject: [PATCH 12/17] use type checkers in `backend-snowflake.R` (#1554) --- NEWS.md | 5 ++++ R/backend-snowflake.R | 30 ++++++++++++---------- tests/testthat/_snaps/backend-snowflake.md | 16 ------------ tests/testthat/test-backend-snowflake.R | 22 ++++++++++------ 4 files changed, 35 insertions(+), 38 deletions(-) diff --git a/NEWS.md b/NEWS.md index e08ad2d69..00d832e34 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,10 @@ # dbplyr (development version) +* Tightened argument checks for Snowflake SQL translations. These changes should + result in more informative errors in cases where code already failed; if you + see errors with code that used to run without issue, please report them to + the package authors (@simonpcouch, #1554). + * `clock::add_years()` translates to correct SQL on Spark (@ablack3, #1510). 
* Translations for `as.double()` and `as.character()` with Teradata previously diff --git a/R/backend-snowflake.R b/R/backend-snowflake.R index 8219da421..af132d6af 100644 --- a/R/backend-snowflake.R +++ b/R/backend-snowflake.R @@ -27,6 +27,7 @@ sql_translation.Snowflake <- function(con) { paste = snowflake_paste(" "), paste0 = snowflake_paste(""), str_c = function(..., sep = "", collapse = NULL) { + check_string(sep) if (!is.null(collapse)) { cli_abort(c( "{.arg collapse} not supported in DB translation of {.fn str_c}.", @@ -40,6 +41,7 @@ sql_translation.Snowflake <- function(con) { }, str_detect = function(string, pattern, negate = FALSE) { con <- sql_current_con() + check_bool(negate) # Snowflake needs backslashes escaped, so we must increase the level of escaping pattern <- gsub("\\", "\\\\", pattern, fixed = TRUE) @@ -51,6 +53,7 @@ sql_translation.Snowflake <- function(con) { }, str_starts = function(string, pattern, negate = FALSE) { con <- sql_current_con() + check_bool(negate) # Snowflake needs backslashes escaped, so we must increase the level of escaping pattern <- gsub("\\", "\\\\", pattern, fixed = TRUE) @@ -62,6 +65,7 @@ sql_translation.Snowflake <- function(con) { }, str_ends = function(string, pattern, negate = FALSE) { con <- sql_current_con() + check_bool(negate) # Snowflake needs backslashes escaped, so we must increase the level of escaping pattern <- gsub("\\", "\\\\", pattern, fixed = TRUE) @@ -111,6 +115,9 @@ sql_translation.Snowflake <- function(con) { sql_expr(EXTRACT(DAY %FROM% !!x)) }, wday = function(x, label = FALSE, abbr = TRUE, week_start = NULL) { + check_bool(label) + check_bool(abbr) + check_number_whole(week_start, allow_null = TRUE) if (!label) { week_start <- week_start %||% getOption("lubridate.week.start", 7) offset <- as.integer(7 - week_start) @@ -140,6 +147,8 @@ sql_translation.Snowflake <- function(con) { }, isoweek = function(x) sql_expr(EXTRACT("weekiso", !!x)), month = function(x, label = FALSE, abbr = TRUE) { + check_bool(label) + check_bool(abbr) if (!label) { sql_expr(EXTRACT("month", !!x)) } else { @@ -167,6 +176,7 @@ sql_translation.Snowflake <- function(con) { } }, quarter = function(x, with_year = FALSE, fiscal_start = 1) { + check_bool(with_year) check_unsupported_arg(fiscal_start, 1) if (with_year) { @@ -220,6 +230,8 @@ sql_translation.Snowflake <- function(con) { sql_expr(DATEADD(YEAR, !!n, !!x)) }, date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { + check_dots_empty() + check_unsupported_arg(invalid, allowed = NULL) # https://docs.snowflake.com/en/sql-reference/functions/date_from_parts sql_expr(DATE_FROM_PARTS(!!year, !!month, !!day)) }, @@ -235,25 +247,15 @@ sql_translation.Snowflake <- function(con) { date_count_between = function(start, end, precision, ..., n = 1L){ check_dots_empty() - if (precision != "day") { - cli_abort("{.arg precision} must be {.val day} on SQL backends.") - } - if (n != 1) { - cli_abort("{.arg n} must be {.val 1} on SQL backends.") - } + check_unsupported_arg(precision, allowed = "day") + check_unsupported_arg(n, allowed = 1L) sql_expr(DATEDIFF(DAY, !!start, !!end)) }, difftime = function(time1, time2, tz, units = "days") { - - if (!missing(tz)) { - cli::cli_abort("The {.arg tz} argument is not supported for SQL backends.") - } - - if (units[1] != "days") { - cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') - } + check_unsupported_arg(tz) + check_unsupported_arg(units, allowed = "days") sql_expr(DATEDIFF(DAY, !!time2, !!time1)) }, diff --git 
a/tests/testthat/_snaps/backend-snowflake.md b/tests/testthat/_snaps/backend-snowflake.md index 1c3ed964a..ccb978075 100644 --- a/tests/testthat/_snaps/backend-snowflake.md +++ b/tests/testthat/_snaps/backend-snowflake.md @@ -7,22 +7,6 @@ ! `collapse` not supported in DB translation of `paste()`. i Please use `str_flatten()` instead. -# difftime is translated correctly - - Code - test_translate_sql(difftime(start_date, end_date, units = "auto")) - Condition - Error in `difftime()`: - ! The only supported value for `units` on SQL backends is "days" - ---- - - Code - test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) - Condition - Error in `difftime()`: - ! The `tz` argument is not supported for SQL backends. - # pmin() and pmax() respect na.rm Code diff --git a/tests/testthat/test-backend-snowflake.R b/tests/testthat/test-backend-snowflake.R index a865587cf..467c1cc12 100644 --- a/tests/testthat/test-backend-snowflake.R +++ b/tests/testthat/test-backend-snowflake.R @@ -120,8 +120,14 @@ test_that("custom clock functions translated correctly", { expect_equal(test_translate_sql(get_day(date_column)), sql("DATE_PART(DAY, `date_column`)")) expect_equal(test_translate_sql(date_count_between(date_column_1, date_column_2, "day")), sql("DATEDIFF(DAY, `date_column_1`, `date_column_2`)")) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "year"))) - expect_error(test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5))) + expect_error( + test_translate_sql(date_count_between(date_column_1, date_column_2, "year")), + class = "dbplyr_error_unsupported_arg" + ) + expect_error( + test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)), + class = "dbplyr_error_unsupported_arg" + ) }) test_that("difftime is translated correctly", { @@ -129,13 +135,13 @@ test_that("difftime is translated correctly", { expect_equal(test_translate_sql(difftime(start_date, end_date, units = "days")), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) expect_equal(test_translate_sql(difftime(start_date, end_date)), sql("DATEDIFF(DAY, `end_date`, `start_date`)")) - expect_snapshot( - error = TRUE, - test_translate_sql(difftime(start_date, end_date, units = "auto")) + expect_error( + test_translate_sql(difftime(start_date, end_date, units = "auto")), + class = "dbplyr_error_unsupported_arg" ) - expect_snapshot( - error = TRUE, - test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) + expect_error( + test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")), + class = "dbplyr_error_unsupported_arg" ) }) From d8549864c2ae86d55fe043fa6baff76dbf52ba2f Mon Sep 17 00:00:00 2001 From: "Simon P. 
Couch" Date: Tue, 12 Nov 2024 10:57:21 -0600 Subject: [PATCH 13/17] use type checkers in `backend-*.R` files (#1556) --- NEWS.md | 8 +++--- R/backend-.R | 2 ++ R/backend-hive.R | 1 + R/backend-mssql.R | 33 ++++++++++------------ R/backend-postgres.R | 28 +++++++++--------- R/backend-redshift.R | 20 ++++--------- R/backend-spark-sql.R | 23 +++++---------- R/backend-teradata.R | 2 ++ R/utils.R | 1 + tests/testthat/_snaps/backend-mssql.md | 25 +++++----------- tests/testthat/_snaps/backend-postgres.md | 11 +++++--- tests/testthat/_snaps/backend-redshift.md | 11 +++++--- tests/testthat/_snaps/backend-spark-sql.md | 11 +++++--- 13 files changed, 80 insertions(+), 96 deletions(-) diff --git a/NEWS.md b/NEWS.md index 00d832e34..78ff3aa75 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,9 +1,9 @@ # dbplyr (development version) -* Tightened argument checks for Snowflake SQL translations. These changes should - result in more informative errors in cases where code already failed; if you - see errors with code that used to run without issue, please report them to - the package authors (@simonpcouch, #1554). +* Tightened argument checks for SQL translations. These changes should + result in more informative errors in cases where code already failed, possibly + silently; if you see errors with code that used to run correctly, please report + them to the package authors (@simonpcouch, #1554, #1555). * `clock::add_years()` translates to correct SQL on Spark (@ablack3, #1510). diff --git a/R/backend-.R b/R/backend-.R index 3557e252d..8c0d6f8cc 100644 --- a/R/backend-.R +++ b/R/backend-.R @@ -252,6 +252,7 @@ base_scalar <- sql_translator( # base R nchar = sql_prefix("LENGTH", 1), nzchar = function(x, keepNA = FALSE) { + check_bool(keepNA) if (keepNA) { exp <- expr(!!x != "") translate_sql(!!exp, con = sql_current_con()) @@ -281,6 +282,7 @@ base_scalar <- sql_translator( str_c = sql_paste(""), str_sub = sql_str_sub("SUBSTR"), str_like = function(string, pattern, ignore_case = TRUE) { + check_bool(ignore_case) if (isTRUE(ignore_case)) { sql_expr(!!string %LIKE% !!pattern) } else { diff --git a/R/backend-hive.R b/R/backend-hive.R index 7dbdb205a..4ae9a7cfb 100644 --- a/R/backend-hive.R +++ b/R/backend-hive.R @@ -94,6 +94,7 @@ sql_table_analyze.Hive <- function(con, table, ...) 
{ #' @export sql_query_set_op.Hive <- function(con, x, y, method, ..., all = FALSE, lvl = 0) { + check_bool(all) # parentheses are not allowed method <- paste0(method, if (all) " ALL") glue_sql2( diff --git a/R/backend-mssql.R b/R/backend-mssql.R index ae70b7b86..af30d2859 100644 --- a/R/backend-mssql.R +++ b/R/backend-mssql.R @@ -107,11 +107,15 @@ simulate_mssql <- function(version = "15.0") { conflict = c("error", "ignore"), returning_cols = NULL, method = NULL) { - method <- method %||% "where_not_exists" - arg_match(method, "where_not_exists", error_arg = "method") # https://stackoverflow.com/questions/25969/insert-into-values-select-from conflict <- rows_check_conflict(conflict) + check_character(returning_cols, allow_null = TRUE) + + check_string(method, allow_null = TRUE) + method <- method %||% "where_not_exists" + arg_match(method, "where_not_exists", error_arg = "method") + parts <- rows_insert_prep(con, table, from, insert_cols, by, lvl = 0) clauses <- list2( @@ -177,6 +181,7 @@ simulate_mssql <- function(version = "15.0") { ..., returning_cols = NULL, method = NULL) { + check_string(method, allow_null = TRUE) method <- method %||% "merge" arg_match(method, "merge", error_arg = "method") @@ -333,6 +338,7 @@ simulate_mssql <- function(version = "15.0") { second = function(x) sql_expr(DATEPART(SECOND, !!x)), month = function(x, label = FALSE, abbr = TRUE) { + check_bool(label) if (!label) { sql_expr(DATEPART(MONTH, !!x)) } else { @@ -342,6 +348,7 @@ simulate_mssql <- function(version = "15.0") { }, quarter = function(x, with_year = FALSE, fiscal_start = 1) { + check_bool(with_year) check_unsupported_arg(fiscal_start, 1, backend = "SQL Server") if (with_year) { @@ -361,6 +368,7 @@ simulate_mssql <- function(version = "15.0") { sql_expr(DATEADD(YEAR, !!n, !!x)) }, date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { + check_unsupported_arg(invalid, allow_null = TRUE) sql_expr(DATEFROMPARTS(!!year, !!month, !!day)) }, get_year = function(x) { @@ -373,27 +381,16 @@ simulate_mssql <- function(version = "15.0") { sql_expr(DATEPART(DAY, !!x)) }, date_count_between = function(start, end, precision, ..., n = 1L){ - check_dots_empty() - if (precision != "day") { - cli_abort("{.arg precision} must be {.val day} on SQL backends.") - } - if (n != 1) { - cli_abort("{.arg n} must be {.val 1} on SQL backends.") - } + check_unsupported_arg(precision, allowed = "day") + check_unsupported_arg(n, allowed = 1L) sql_expr(DATEDIFF(DAY, !!start, !!end)) }, difftime = function(time1, time2, tz, units = "days") { - - if (!missing(tz)) { - cli::cli_abort("The {.arg tz} argument is not supported for SQL backends.") - } - - if (units[1] != "days") { - cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') - } + check_unsupported_arg(tz) + check_unsupported_arg(units, allowed = "days") sql_expr(DATEDIFF(DAY, !!time2, !!time1)) } @@ -545,7 +542,7 @@ mssql_version <- function(con) { #' @export `sql_returning_cols.Microsoft SQL Server` <- function(con, cols, table, ...) 
{ - stopifnot(table %in% c("DELETED", "INSERTED")) + arg_match(table, values = c("DELETED", "INSERTED")) returning_cols <- sql_named_cols(con, cols, table = table) sql_clause("OUTPUT", returning_cols) diff --git a/R/backend-postgres.R b/R/backend-postgres.R index a766149c3..824f886bb 100644 --- a/R/backend-postgres.R +++ b/R/backend-postgres.R @@ -57,6 +57,7 @@ postgres_grepl <- function(pattern, check_unsupported_arg(perl, FALSE, backend = "PostgreSQL") check_unsupported_arg(fixed, FALSE, backend = "PostgreSQL") check_unsupported_arg(useBytes, FALSE, backend = "PostgreSQL") + check_bool(ignore.case) if (ignore.case) { sql_expr(((!!x)) %~*% ((!!pattern))) @@ -123,6 +124,7 @@ sql_translation.PqConnection <- function(con) { }, # https://www.postgresql.org/docs/current/functions-matching.html str_like = function(string, pattern, ignore_case = TRUE) { + check_bool(ignore_case) if (isTRUE(ignore_case)) { sql_expr(!!string %ILIKE% !!pattern) } else { @@ -162,6 +164,9 @@ sql_translation.PqConnection <- function(con) { sql_expr(EXTRACT(DAY %FROM% !!x)) }, wday = function(x, label = FALSE, abbr = TRUE, week_start = NULL) { + check_bool(label) + check_bool(abbr) + check_number_whole(week_start, allow_null = TRUE) if (!label) { week_start <- week_start %||% getOption("lubridate.week.start", 7) offset <- as.integer(7 - week_start) @@ -182,6 +187,8 @@ sql_translation.PqConnection <- function(con) { sql_expr(EXTRACT(WEEK %FROM% !!x)) }, month = function(x, label = FALSE, abbr = TRUE) { + check_bool(label) + check_bool(abbr) if (!label) { sql_expr(EXTRACT(MONTH %FROM% !!x)) } else { @@ -193,6 +200,7 @@ sql_translation.PqConnection <- function(con) { } }, quarter = function(x, with_year = FALSE, fiscal_start = 1) { + check_bool(with_year) check_unsupported_arg(fiscal_start, 1, backend = "PostgreSQL") if (with_year) { @@ -246,17 +254,14 @@ sql_translation.PqConnection <- function(con) { glue_sql2(sql_current_con(), "({.col x} + {.val n}*INTERVAL'1 year')") }, date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { + check_unsupported_arg(invalid, allow_null = TRUE) sql_expr(make_date(!!year, !!month, !!day)) }, date_count_between = function(start, end, precision, ..., n = 1L){ check_dots_empty() - if (precision != "day") { - cli_abort("{.arg precision} must be {.val day} on SQL backends.") - } - if (n != 1) { - cli_abort("{.arg n} must be {.val 1} on SQL backends.") - } + check_unsupported_arg(precision, allowed = "day") + check_unsupported_arg(n, allowed = 1L) sql_expr(!!end - !!start) }, @@ -272,13 +277,8 @@ sql_translation.PqConnection <- function(con) { difftime = function(time1, time2, tz, units = "days") { - if (!missing(tz)) { - cli::cli_abort("The {.arg tz} argument is not supported for SQL backends.") - } - - if (units[1] != "days") { - cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') - } + check_unsupported_arg(tz) + check_unsupported_arg(units, allowed = "days") sql_expr((CAST(!!time1 %AS% DATE) - CAST(!!time2 %AS% DATE))) }, @@ -344,6 +344,7 @@ sql_query_insert.PqConnection <- function(con, ..., returning_cols = NULL, method = NULL) { + check_string(method, allow_null = TRUE) method <- method %||% "on_conflict" arg_match(method, c("on_conflict", "where_not_exists"), error_arg = "method") if (method == "where_not_exists") { @@ -379,6 +380,7 @@ sql_query_upsert.PqConnection <- function(con, ..., returning_cols = NULL, method = NULL) { + check_string(method, allow_null = TRUE) method <- method %||% "on_conflict" arg_match(method, 
c("cte_update", "on_conflict"), error_arg = "method") diff --git a/R/backend-redshift.R b/R/backend-redshift.R index d02d4a1a9..e9224c588 100644 --- a/R/backend-redshift.R +++ b/R/backend-redshift.R @@ -72,6 +72,7 @@ sql_translation.RedshiftConnection <- function(con) { sql_expr(DATEADD(YEAR, !!n, !!x)) }, date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { + check_unsupported_arg(invalid, allow_null = TRUE) glue_sql2(sql_current_con(), "TO_DATE(CAST({.val year} AS TEXT) || '-' CAST({.val month} AS TEXT) || '-' || CAST({.val day} AS TEXT)), 'YYYY-MM-DD')") }, get_year = function(x) { @@ -84,27 +85,16 @@ sql_translation.RedshiftConnection <- function(con) { sql_expr(DATE_PART('day', !!x)) }, date_count_between = function(start, end, precision, ..., n = 1L){ - check_dots_empty() - if (precision != "day") { - cli_abort("{.arg precision} must be {.val day} on SQL backends.") - } - if (n != 1) { - cli_abort("{.arg n} must be {.val 1} on SQL backends.") - } + check_unsupported_arg(precision, allowed = "day") + check_unsupported_arg(n, allowed = 1L) sql_expr(DATEDIFF(DAY, !!start, !!end)) }, difftime = function(time1, time2, tz, units = "days") { - - if (!missing(tz)) { - cli::cli_abort("The {.arg tz} argument is not supported for SQL backends.") - } - - if (units[1] != "days") { - cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') - } + check_unsupported_arg(tz) + check_unsupported_arg(units, allowed = "days") sql_expr(DATEDIFF(DAY, !!time2, !!time1)) } diff --git a/R/backend-spark-sql.R b/R/backend-spark-sql.R index 88de3f7a5..4ab1b1df6 100644 --- a/R/backend-spark-sql.R +++ b/R/backend-spark-sql.R @@ -47,6 +47,7 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") sql_expr(add_months(!!x, !!n*12)) }, date_build = function(year, month = 1L, day = 1L, ..., invalid = NULL) { + check_unsupported_arg(invalid, allow_null = TRUE) sql_expr(make_date(!!year, !!month, !!day)) }, get_year = function(x) { @@ -59,27 +60,16 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") sql_expr(date_part('DAY', !!x)) }, date_count_between = function(start, end, precision, ..., n = 1L){ - check_dots_empty() - if (precision != "day") { - cli_abort("{.arg precision} must be {.val day} on SQL backends.") - } - if (n != 1) { - cli_abort("{.arg n} must be {.val 1} on SQL backends.") - } + check_unsupported_arg(precision, allowed = "day") + check_unsupported_arg(n, allowed = 1L) sql_expr(datediff(!!end, !!start)) }, difftime = function(time1, time2, tz, units = "days") { - - if (!missing(tz)) { - cli::cli_abort("The {.arg tz} argument is not supported for SQL backends.") - } - - if (units[1] != "days") { - cli::cli_abort('The only supported value for {.arg units} on SQL backends is "days"') - } + check_unsupported_arg(tz) + check_unsupported_arg(units, allowed = "days") sql_expr(datediff(!!time2, !!time1)) } @@ -153,7 +143,8 @@ simulate_spark_sql <- function() simulate_dbi("Spark SQL") indexes = list(), analyze = TRUE, in_transaction = FALSE) { - + check_bool(overwrite) + check_bool(temporary) sql <- glue_sql2( con, "CREATE ", if (overwrite) "OR REPLACE ", diff --git a/R/backend-teradata.R b/R/backend-teradata.R index e5fc0c861..c1b2d6a8b 100644 --- a/R/backend-teradata.R +++ b/R/backend-teradata.R @@ -153,6 +153,7 @@ sql_translation.Teradata <- function(con) { row_number = win_rank("ROW_NUMBER", empty_order = TRUE), weighted.mean = function(x, w, na.rm = T) { # nocov start + check_unsupported_arg(na.rm, allowed = TRUE) win_over( sql_expr(SUM((!!x * 
!!w))/SUM(!!w)), win_current_group(), @@ -191,6 +192,7 @@ sql_translation.Teradata <- function(con) { }, weighted.mean = function(x, w, na.rm = T) { # nocov start + check_unsupported_arg(na.rm, allowed = TRUE) win_over( sql_expr(SUM((!!x * !!w))/SUM(!!w)), win_current_group(), diff --git a/R/utils.R b/R/utils.R index 04fd20fb1..c7d995cf2 100644 --- a/R/utils.R +++ b/R/utils.R @@ -83,6 +83,7 @@ res_warn_incomplete <- function(res, hint = "n = -1") { } add_temporary_prefix <- function(con, table, temporary = TRUE) { + check_bool(temporary) check_table_path(table) if (!temporary) { diff --git a/tests/testthat/_snaps/backend-mssql.md b/tests/testthat/_snaps/backend-mssql.md index 36d772c8d..3ad0ef302 100644 --- a/tests/testthat/_snaps/backend-mssql.md +++ b/tests/testthat/_snaps/backend-mssql.md @@ -47,7 +47,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) Condition Error in `date_count_between()`: - ! `precision` must be "day" on SQL backends. + ! `precision = "year"` isn't supported on database backends. + i It must be "day" instead. --- @@ -55,7 +56,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) Condition Error in `date_count_between()`: - ! `n` must be "1" on SQL backends. + ! `n = 5` isn't supported on database backends. + i It must be 1 instead. # difftime is translated correctly @@ -63,7 +65,8 @@ test_translate_sql(difftime(start_date, end_date, units = "auto")) Condition Error in `difftime()`: - ! The only supported value for `units` on SQL backends is "days" + ! `units = "auto"` isn't supported on database backends. + i It must be "days" instead. --- @@ -71,7 +74,7 @@ test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) Condition Error in `difftime()`: - ! The `tz` argument is not supported for SQL backends. + ! Argument `tz` isn't supported on database backends. # convert between bit and boolean as needed @@ -494,20 +497,6 @@ FROM `df` ORDER BY `y` -# can copy_to() and compute() with temporary tables (#438) - - Code - db <- copy_to(con, df, name = unique_table_name(), temporary = TRUE) - Message - Created a temporary table named #dbplyr_{tmp} - ---- - - Code - db2 <- db %>% mutate(y = x + 1) %>% compute() - Message - Created a temporary table named #dbplyr_{tmp} - # add prefix to temporary table Code diff --git a/tests/testthat/_snaps/backend-postgres.md b/tests/testthat/_snaps/backend-postgres.md index 326582bfc..131096c9d 100644 --- a/tests/testthat/_snaps/backend-postgres.md +++ b/tests/testthat/_snaps/backend-postgres.md @@ -22,7 +22,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) Condition Error in `date_count_between()`: - ! `precision` must be "day" on SQL backends. + ! `precision = "year"` isn't supported on database backends. + i It must be "day" instead. --- @@ -30,7 +31,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) Condition Error in `date_count_between()`: - ! `n` must be "1" on SQL backends. + ! `n = 5` isn't supported on database backends. + i It must be 1 instead. # difftime is translated correctly @@ -38,7 +40,8 @@ test_translate_sql(difftime(start_date, end_date, units = "auto")) Condition Error in `difftime()`: - ! The only supported value for `units` on SQL backends is "days" + ! `units = "auto"` isn't supported on database backends. + i It must be "days" instead. 
--- @@ -46,7 +49,7 @@ test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) Condition Error in `difftime()`: - ! The `tz` argument is not supported for SQL backends. + ! Argument `tz` isn't supported on database backends. # custom window functions translated correctly diff --git a/tests/testthat/_snaps/backend-redshift.md b/tests/testthat/_snaps/backend-redshift.md index b85f9ec96..7254a8a0e 100644 --- a/tests/testthat/_snaps/backend-redshift.md +++ b/tests/testthat/_snaps/backend-redshift.md @@ -41,7 +41,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) Condition Error in `date_count_between()`: - ! `precision` must be "day" on SQL backends. + ! `precision = "year"` isn't supported on database backends. + i It must be "day" instead. --- @@ -49,7 +50,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) Condition Error in `date_count_between()`: - ! `n` must be "1" on SQL backends. + ! `n = 5` isn't supported on database backends. + i It must be 1 instead. # difftime is translated correctly @@ -57,7 +59,8 @@ test_translate_sql(difftime(start_date, end_date, units = "auto")) Condition Error in `difftime()`: - ! The only supported value for `units` on SQL backends is "days" + ! `units = "auto"` isn't supported on database backends. + i It must be "days" instead. --- @@ -65,5 +68,5 @@ test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) Condition Error in `difftime()`: - ! The `tz` argument is not supported for SQL backends. + ! Argument `tz` isn't supported on database backends. diff --git a/tests/testthat/_snaps/backend-spark-sql.md b/tests/testthat/_snaps/backend-spark-sql.md index cc9dc554e..2ee82a49a 100644 --- a/tests/testthat/_snaps/backend-spark-sql.md +++ b/tests/testthat/_snaps/backend-spark-sql.md @@ -4,7 +4,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "year")) Condition Error in `date_count_between()`: - ! `precision` must be "day" on SQL backends. + ! `precision = "year"` isn't supported on database backends. + i It must be "day" instead. --- @@ -12,7 +13,8 @@ test_translate_sql(date_count_between(date_column_1, date_column_2, "day", n = 5)) Condition Error in `date_count_between()`: - ! `n` must be "1" on SQL backends. + ! `n = 5` isn't supported on database backends. + i It must be 1 instead. # difftime is translated correctly @@ -20,7 +22,8 @@ test_translate_sql(difftime(start_date, end_date, units = "auto")) Condition Error in `difftime()`: - ! The only supported value for `units` on SQL backends is "days" + ! `units = "auto"` isn't supported on database backends. + i It must be "days" instead. --- @@ -28,5 +31,5 @@ test_translate_sql(difftime(start_date, end_date, tz = "UTC", units = "days")) Condition Error in `difftime()`: - ! The `tz` argument is not supported for SQL backends. + ! Argument `tz` isn't supported on database backends. 
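The reworked tests in the patches above lean on the fact that unsupported arguments now signal classed conditions rather than free-text messages. A minimal sketch of that behaviour from the caller's side, using the exported translate_sql() instead of the internal test_translate_sql() helper; simulate_snowflake() is used as the connection stub, and the "dbplyr_error_unsupported_arg" class is taken from the expect_error() calls in the Snowflake tests above:

    library(dbplyr)

    con <- simulate_snowflake()

    # Supported arguments translate as before:
    translate_sql(difftime(start_date, end_date, units = "days"), con = con)
    #> <SQL> DATEDIFF(DAY, `end_date`, `start_date`)

    # Unsupported arguments signal a classed condition, so tests (and user
    # code) can match on the class rather than on the wording:
    err <- rlang::catch_cnd(
      translate_sql(difftime(start_date, end_date, units = "auto"), con = con)
    )
    inherits(err, "dbplyr_error_unsupported_arg")
    #> [1] TRUE
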
From 2d407477a124619a6dc82c4c5157c17c1e475d03 Mon Sep 17 00:00:00 2001 From: Hadley Wickham Date: Mon, 8 Apr 2024 09:12:19 -0500 Subject: [PATCH 14/17] Add blog post --- _pkgdown.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/_pkgdown.yml b/_pkgdown.yml index 4acd672b2..a258266e7 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -96,6 +96,8 @@ reference: news: releases: + - text: "Version 2.5.0" + href: https://www.tidyverse.org/blog/2024/04/dbplyr-2-5-0/ - text: "Version 2.4.0" href: https://www.tidyverse.org/blog/2023/10/dbplyr-2-4-0/ - text: "Version 2.3.0" From 71b677e8b9ad3e2f2b3d859ea761dcf4235eefd9 Mon Sep 17 00:00:00 2001 From: Hadley Wickham Date: Fri, 29 Nov 2024 14:13:33 -0600 Subject: [PATCH 15/17] Fix urls --- DESCRIPTION | 2 +- R/backend-mssql.R | 2 +- R/backend-oracle.R | 2 +- man/backend-mssql.Rd | 2 +- man/backend-oracle.Rd | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index fb1835022..dec46094b 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -58,7 +58,7 @@ Config/testthat/parallel: TRUE Encoding: UTF-8 Language: en-gb Roxygen: list(markdown = TRUE) -RoxygenNote: 7.3.1 +RoxygenNote: 7.3.2 Collate: 'db-sql.R' 'utils-check.R' diff --git a/R/backend-mssql.R b/R/backend-mssql.R index af30d2859..ac65bc08e 100644 --- a/R/backend-mssql.R +++ b/R/backend-mssql.R @@ -22,7 +22,7 @@ #' #' * The `BOOLEAN` type is the result of logical comparisons (e.g. `x > y`) #' and can be used `WHERE` but not to create new columns in `SELECT`. -#' +#' #' #' * The `BIT` type is a special type of numeric column used to store #' `TRUE` and `FALSE` values, but can't be used in `WHERE` clauses. diff --git a/R/backend-oracle.R b/R/backend-oracle.R index 467f3eb7f..856869c13 100644 --- a/R/backend-oracle.R +++ b/R/backend-oracle.R @@ -13,7 +13,7 @@ #' #' Note that versions of Oracle prior to 23c have limited supported for #' `TRUE` and `FALSE` and you may need to use `1` and `0` instead. -#' See for +#' See for #' more details. #' #' Use `simulate_oracle()` with `lazy_frame()` to see simulated SQL without diff --git a/man/backend-mssql.Rd b/man/backend-mssql.Rd index 7525ffdb5..eca5b21ed 100644 --- a/man/backend-mssql.Rd +++ b/man/backend-mssql.Rd @@ -34,7 +34,7 @@ values: \itemize{ \item The \code{BOOLEAN} type is the result of logical comparisons (e.g. \code{x > y}) and can be used \code{WHERE} but not to create new columns in \code{SELECT}. -\url{https://docs.microsoft.com/en-us/sql/t-sql/language-elements/comparison-operators-transact-sql} +\url{https://learn.microsoft.com/en-us/sql/t-sql/language-elements/comparison-operators-transact-sql} \item The \code{BIT} type is a special type of numeric column used to store \code{TRUE} and \code{FALSE} values, but can't be used in \code{WHERE} clauses. \url{https://learn.microsoft.com/en-us/sql/t-sql/data-types/bit-transact-sql?view=sql-server-ver15} diff --git a/man/backend-oracle.Rd b/man/backend-oracle.Rd index d68275cf0..9562eb0fd 100644 --- a/man/backend-oracle.Rd +++ b/man/backend-oracle.Rd @@ -20,7 +20,7 @@ are: Note that versions of Oracle prior to 23c have limited supported for \code{TRUE} and \code{FALSE} and you may need to use \code{1} and \code{0} instead. -See \url{https://oracle-base.com/articles/23c/boolean-data-type-23c} for +See \url{https://oracle-base.com/articles/23/boolean-data-type-23} for more details. 
Use \code{simulate_oracle()} with \code{lazy_frame()} to see simulated SQL without From f8ea630c4aac84a310df868b9f65d8565e5d7f45 Mon Sep 17 00:00:00 2001 From: Salim B Date: Mon, 2 Dec 2024 00:19:33 +0100 Subject: [PATCH 16/17] Fix doc and tweak formatting (#1543) --- R/remote.R | 12 ++++++------ man/remote_name.Rd | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/R/remote.R b/R/remote.R index b267e2966..3a94c5220 100644 --- a/R/remote.R +++ b/R/remote.R @@ -7,16 +7,16 @@ #' the query plan (as computed by the remote database). `remote_src()` and #' `remote_con()` give the dplyr source and DBI connection respectively. #' -#' @param x Remote table, currently must be a [tbl_sql]. +#' @param x Remote table, currently must be a [`tbl_sql`]. #' @param null_if_local Return `NULL` if the remote table is created via -#' `tbl_lazy()` or `lazy_frame()`? +#' [tbl_lazy()] or [lazy_frame()]? #' @param cte `r lifecycle::badge("deprecated")` -#' Use the `render_otions` argument instead. +#' Use the `sql_options` argument instead. #' @param sql_options `r lifecycle::badge("experimental")` -#' SQL rendering options generated by `sql_options()`. +#' SQL rendering options generated by [sql_options()]. #' @param ... Additional arguments passed on to methods. -#' @return The value, or `NULL` if not remote table, or not applicable. -#' For example, computed queries do not have a "name" +#' @return A string, or `NULL` if not a remote table, or not applicable. +#' For example, computed queries do not have a "name". #' @export #' @examples #' mf <- memdb_frame(x = 1:5, y = 5:1, .name = "blorp") diff --git a/man/remote_name.Rd b/man/remote_name.Rd index 2c2d70f87..d24445e4c 100644 --- a/man/remote_name.Rd +++ b/man/remote_name.Rd @@ -22,22 +22,22 @@ remote_query(x, cte = FALSE, sql_options = NULL) remote_query_plan(x, ...) } \arguments{ -\item{x}{Remote table, currently must be a \link{tbl_sql}.} +\item{x}{Remote table, currently must be a \code{\link{tbl_sql}}.} \item{null_if_local}{Return \code{NULL} if the remote table is created via -\code{tbl_lazy()} or \code{lazy_frame()}?} +\code{\link[=tbl_lazy]{tbl_lazy()}} or \code{\link[=lazy_frame]{lazy_frame()}}?} \item{cte}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} -Use the \code{render_otions} argument instead.} +Use the \code{sql_options} argument instead.} \item{sql_options}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#experimental}{\figure{lifecycle-experimental.svg}{options: alt='[Experimental]'}}}{\strong{[Experimental]}} -SQL rendering options generated by \code{sql_options()}.} +SQL rendering options generated by \code{\link[=sql_options]{sql_options()}}.} \item{...}{Additional arguments passed on to methods.} } \value{ -The value, or \code{NULL} if not remote table, or not applicable. -For example, computed queries do not have a "name" +A string, or \code{NULL} if not a remote table, or not applicable. +For example, computed queries do not have a "name". } \description{ \code{remote_name()} gives the unescaped name of the remote table, or \code{NULL} if it From 572693098d0098018b6bb2d1e88b650d2e1e1580 Mon Sep 17 00:00:00 2001 From: "Simon P. 
Couch" Date: Wed, 4 Dec 2024 08:27:21 -0600 Subject: [PATCH 17/17] update SQLite snap (#1563) --- DESCRIPTION | 2 +- tests/testthat/_snaps/backend-sqlite.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index dec46094b..3fee17fd1 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -48,7 +48,7 @@ Suggests: rmarkdown, RPostgres (>= 1.4.5), RPostgreSQL, - RSQLite (>= 2.3.1), + RSQLite (>= 2.3.8), testthat (>= 3.1.10) VignetteBuilder: knitr diff --git a/tests/testthat/_snaps/backend-sqlite.md b/tests/testthat/_snaps/backend-sqlite.md index 0b0d3a20a..9c6450a31 100644 --- a/tests/testthat/_snaps/backend-sqlite.md +++ b/tests/testthat/_snaps/backend-sqlite.md @@ -36,5 +36,5 @@ id parent notused detail - 1 2 0 0 SEARCH test USING COVERING INDEX test_x (x>?) + 1 2 0 35 SEARCH test USING COVERING INDEX test_x (x>?)