Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
# brickster 0.2.10
- Increment version of httr2 required (>= 1.1.1)
- DBI connections expose `max_active_connections` and `fetch_timeout` to
control result download concurrency and timeouts
- DBI/dbplyr write table methods now make two transactions (create empty table --> insert into) to ensure type correctness
- Allow optional schedules in `db_jobs_reset()` and propagate parameters in reset/update requests.

- Increment version of httr2 required (\>= 1.1.1)
- DBI connections expose `max_active_connections` and `fetch_timeout` to control result download concurrency and timeouts
- DBI/dbplyr write table methods now make two transactions (create empty table --\> insert into) to ensure type correctness
- Allow optional schedules in `db_jobs_reset()` and propagate parameters in reset/update requests.
- DBI/dbplyr inline writes now preserve single quotes in character columns via explicit escaping (#130)

# brickster 0.2.9
- Added DBI + dbplyr backend support: `DatabricksSQL()` driver for standard DBI operations
- Increase support for job level parameters
- Added `db_jobs_repair_run`

- Added DBI + dbplyr backend support: `DatabricksSQL()` driver for standard DBI operations
- Increase support for job level parameters
- Added `db_jobs_repair_run`

# brickster 0.2.8

Expand Down Expand Up @@ -58,4 +58,4 @@
- Adding OAuth U2M support (workspace level), considered the default when `DATABRICKS_TOKEN` isn't specified (e.g `db_token()` returns `NULL`)
- Updating authentication vignette to include information on OAuth
- Updating README.md to include quick start and clearer information
- Adding vector search index functions
- Adding vector search index functions
23 changes: 17 additions & 6 deletions R/databricks-dbi.R
Original file line number Diff line number Diff line change
Expand Up @@ -1244,7 +1244,7 @@ db_generate_values_sql <- function(conn, data) {
if (is.na(val)) {
"NULL"
} else if (is.character(val)) {
paste0("'", gsub("'", "''", val), "'") # Escape single quotes
db_escape_string_literal(conn, val)
} else if (is.logical(val)) {
if (val) "TRUE" else "FALSE"
} else {
Expand Down Expand Up @@ -1274,10 +1274,10 @@ db_generate_typed_values_sql <- function(conn, data) {
as.character(val)
} else if (is.character(col_data)) {
# Quote string values and escape single quotes
paste0("'", gsub("'", "''", val), "'")
db_escape_string_literal(conn, val)
} else {
# Default to quoted string for other types
paste0("'", gsub("'", "''", as.character(val)), "'")
db_escape_string_literal(conn, as.character(val))
}
})
paste0("(", paste(values, collapse = ", "), ")")
Expand All @@ -1286,6 +1286,19 @@ db_generate_typed_values_sql <- function(conn, data) {
paste(row_values, collapse = ", ")
}

#' Escape string literals for inline SQL VALUES
#'
#' Produces a single-quoted Spark SQL string literal. Spark SQL uses
#' backslash escaping, so backslashes must be doubled before single
#' quotes are escaped (otherwise a trailing `\` would neutralise the
#' closing quote). `NA` values are emitted as unquoted `NULL`.
#'
#' @param conn Connection object (unused; kept for signature consistency
#'   with the other `db_generate_*` helpers).
#' @param val Character vector of values to escape. Scalars and vectors
#'   (including zero-length) are supported.
#' @return Character vector of SQL literals, same length as `val`.
#' @keywords internal
db_escape_string_literal <- function(conn, val) {
  # fixed = TRUE: treat pattern/replacement literally, no regex escaping needed
  escaped <- gsub("\\", "\\\\", val, fixed = TRUE) # backslashes first
  escaped <- gsub("'", "\\'", escaped, fixed = TRUE)
  out <- paste0("'", escaped, "'")
  # NA propagates through gsub/paste0; replace those slots with SQL NULL
  out[is.na(val)] <- "NULL"
  out
}

#' Create table with explicit schema before inserting values
#' @keywords internal
db_create_table_as_select_values <- function(
Expand Down Expand Up @@ -1368,15 +1381,13 @@ db_should_use_volume_method <- function(
) {
n_rows <- nrow(value)
has_volume <- !is.null(staging_volume) && nchar(staging_volume) > 0
has_arrow <- rlang::is_installed("arrow")

# Temporary tables should use standard method (COPY INTO may not support them)
if (temporary) {
return(FALSE)
}

# Check if arrow is available
has_arrow <- rlang::is_installed("arrow")

# Check dataset size limits without volume staging
if (!has_volume) {
if (n_rows > 50000) {
Expand Down
4 changes: 2 additions & 2 deletions R/databricks-dbplyr.R
Original file line number Diff line number Diff line change
Expand Up @@ -245,10 +245,10 @@ db_generate_typed_values_sql_for_view <- function(con, data) {
as.character(val)
} else if (is.character(col_data)) {
# Quote string values and escape single quotes
paste0("'", gsub("'", "''", val), "'")
db_escape_string_literal(con, val)
} else {
# Default to quoted string for other types
paste0("'", gsub("'", "''", as.character(val)), "'")
db_escape_string_literal(con, as.character(val))
}
})
paste0("(", paste(values, collapse = ", "), ")")
Expand Down
12 changes: 12 additions & 0 deletions man/db_escape_string_literal.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

28 changes: 28 additions & 0 deletions tests/testthat/test-databricks-dbi.R
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,34 @@ test_that("Quote handling utility functions work", {
expect_equal(clean_quoted('"samples.nyctaxi.trips"'), "samples.nyctaxi.trips")
})

# Regression test for #130: inline writes must preserve single quotes in
# character columns rather than mangling them during SQL generation.
test_that("db_generate_typed_values_sql preserves single quotes", {
# Minimal S4 connection; fields are dummies — no network call is made,
# the generators only build SQL text from the data frame.
con <- new(
"DatabricksConnection",
warehouse_id = "test_warehouse",
host = "test_host",
token = "test_token",
catalog = "",
schema = "",
staging_volume = ""
)

# Value containing multiple single quotes (apostrophes + possessive)
test_value <- "O'Connor & D'Angelo's data"

values_sql <- brickster:::db_generate_typed_values_sql(
con,
data.frame(test = test_value, stringsAsFactors = FALSE)
)

# Expect Spark SQL backslash escaping (\') — not the doubled-quote ('') form
expect_equal(values_sql, "('O\\'Connor & D\\'Angelo\\'s data')")

# The dbplyr view-generation path must escape identically
view_values_sql <- brickster:::db_generate_typed_values_sql_for_view(
con,
data.frame(test = test_value, stringsAsFactors = FALSE)
)

expect_equal(view_values_sql, "('O\\'Connor & D\\'Angelo\\'s data')")
})

test_that("DatabricksResult show method works", {
# Create a result object for testing
res <- new(
Expand Down