Skip to content

Increase column limit to 1999 #50

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Dec 30, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 49 additions & 16 deletions crates/core/src/diff.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
extern crate alloc;


use alloc::format;
use alloc::string::{String, ToString};
use core::ffi::c_int;
use core::slice;

use sqlite::{ResultCode};
use sqlite::ResultCode;
use sqlite_nostd as sqlite;
use sqlite_nostd::{Connection, Context, Value};

@@ -26,7 +25,6 @@ fn powersync_diff_impl(
}

pub fn diff_objects(data_old: &str, data_new: &str) -> Result<String, SQLiteError> {

let v_new: json::Value = json::from_str(data_new)?;
let v_old: json::Value = json::from_str(data_old)?;

@@ -81,7 +79,6 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
Ok(())
}


#[cfg(test)]
mod tests {
use super::*;
@@ -91,17 +88,53 @@ mod tests {
assert_eq!(diff_objects("{}", "{}").unwrap(), "{}");
assert_eq!(diff_objects(r#"{"a": null}"#, "{}").unwrap(), "{}");
assert_eq!(diff_objects(r#"{}"#, r#"{"a": null}"#).unwrap(), "{}");
assert_eq!(diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 1}"#).unwrap(), "{}");
assert_eq!(diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(), r#"{"b":2}"#);
assert_eq!(diff_objects(r#"{"a": 0, "b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(), r#"{"a":null,"b":2}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": null}"#).unwrap(), r#"{"a":null}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{}"#).unwrap(), r#"{"a":null}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": 2}"#).unwrap(), r#"{"a":2}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": "1"}"#).unwrap(), r#"{"a":"1"}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": 1.0}"#).unwrap(), r#"{"a":1.0}"#);
assert_eq!(diff_objects(r#"{"a": 1.00}"#, r#"{"a": 1.0}"#).unwrap(), r#"{}"#);
assert_eq!(diff_objects(r#"{}"#, r#"{"a": 1.0}"#).unwrap(), r#"{"a":1.0}"#);
assert_eq!(diff_objects(r#"{}"#, r#"{"a": [1,2,3]}"#).unwrap(), r#"{"a":[1,2,3]}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": [1,2,3]}"#).unwrap(), r#"{"a":[1,2,3]}"#);
assert_eq!(
diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 1}"#).unwrap(),
"{}"
);
assert_eq!(
diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(),
r#"{"b":2}"#
);
assert_eq!(
diff_objects(r#"{"a": 0, "b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(),
r#"{"a":null,"b":2}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": null}"#).unwrap(),
r#"{"a":null}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{}"#).unwrap(),
r#"{"a":null}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": 2}"#).unwrap(),
r#"{"a":2}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": "1"}"#).unwrap(),
r#"{"a":"1"}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": 1.0}"#).unwrap(),
r#"{"a":1.0}"#
);
assert_eq!(
diff_objects(r#"{"a": 1.00}"#, r#"{"a": 1.0}"#).unwrap(),
r#"{}"#
);
assert_eq!(
diff_objects(r#"{}"#, r#"{"a": 1.0}"#).unwrap(),
r#"{"a":1.0}"#
);
assert_eq!(
diff_objects(r#"{}"#, r#"{"a": [1,2,3]}"#).unwrap(),
r#"{"a":[1,2,3]}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": [1,2,3]}"#).unwrap(),
r#"{"a":[1,2,3]}"#
);
}
}
69 changes: 69 additions & 0 deletions crates/core/src/json_merge.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
extern crate alloc;

use alloc::format;
use alloc::string::{String, ToString};
use core::ffi::c_int;
use core::slice;

use sqlite::ResultCode;
use sqlite_nostd as sqlite;
use sqlite_nostd::{Connection, Context, Value};

use crate::create_sqlite_text_fn;
use crate::error::SQLiteError;

/// Given any number of JSON TEXT arguments, merge them into a single JSON object.
///
/// This assumes each argument is a valid JSON object, with no duplicate keys
/// and no whitespace outside the outer braces. No JSON parsing or validation
/// is performed - this performs simple string concatenation.
fn powersync_json_merge_impl(
    _ctx: *mut sqlite::context,
    args: &[*mut sqlite::value],
) -> Result<String, SQLiteError> {
    if args.is_empty() {
        return Ok("{}".to_string());
    }
    let mut result = String::from("{");
    for arg in args {
        let chunk = arg.text();
        if chunk.is_empty() || !chunk.starts_with('{') || !chunk.ends_with('}') {
            return Err(SQLiteError::from(ResultCode::MISMATCH));
        }

        // Strip outer braces to get the object's key/value content.
        let inner = &chunk[1..(chunk.len() - 1)];

        // An empty object ("{}") contributes nothing. Skip it *before*
        // emitting a separator - otherwise merging a non-empty object with
        // "{}" would produce a trailing comma (e.g. `{"a":1,}`), which is
        // invalid JSON.
        if inner.is_empty() {
            continue;
        }

        // If this is not the first non-empty chunk, insert a comma
        if result.len() > 1 {
            result.push(',');
        }

        // Append the inner content
        result.push_str(inner);
    }

    // Close the outer brace
    result.push('}');
    Ok(result)
}

// Wraps the `_impl` function in an extern "C" SQLite scalar-function entry
// point named `powersync_json_merge` (the macro handles argument conversion
// and error-to-result-code mapping; the string is used in error messages).
create_sqlite_text_fn!(
    powersync_json_merge,
    powersync_json_merge_impl,
    "powersync_json_merge"
);

/// Registers the `powersync_json_merge` scalar function on the given database.
///
/// The function is variadic (narg = -1), deterministic, and operates on
/// UTF-8 text arguments.
pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
    let flags = sqlite::UTF8 | sqlite::DETERMINISTIC;
    db.create_function_v2(
        "powersync_json_merge",
        -1, // accept any number of arguments
        flags,
        None,                       // no user data
        Some(powersync_json_merge), // xFunc: scalar implementation
        None,                       // xStep (aggregate only)
        None,                       // xFinal (aggregate only)
        None,                       // no destructor
    )
    .map(|_| ())
}
2 changes: 2 additions & 0 deletions crates/core/src/lib.rs
Original file line number Diff line number Diff line change
@@ -18,6 +18,7 @@ mod diff;
mod error;
mod ext;
mod fix035;
mod json_merge;
mod kv;
mod macros;
mod migrations;
@@ -55,6 +56,7 @@ fn init_extension(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
crate::views::register(db)?;
crate::uuid::register(db)?;
crate::diff::register(db)?;
crate::json_merge::register(db)?;
crate::view_admin::register(db)?;
crate::checkpoint::register(db)?;
crate::kv::register(db)?;
86 changes: 54 additions & 32 deletions crates/core/src/views.rs
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@ use core::ffi::c_int;
use core::slice;

use sqlite::{Connection, Context, ResultCode, Value};
use sqlite_nostd as sqlite;
use sqlite_nostd::{self as sqlite, ManagedStmt};

use crate::create_sqlite_text_fn;
use crate::error::{PSResult, SQLiteError};
@@ -143,16 +143,7 @@ fn powersync_trigger_insert_sql_impl(
let local_db = ctx.db_handle();
let stmt2 = local_db.prepare_v2("select json_extract(e.value, '$.name') as name from json_each(json_extract(?, '$.columns')) e")?;
stmt2.bind_text(1, table, sqlite::Destructor::STATIC)?;

let mut column_names_quoted: Vec<String> = alloc::vec![];
while stmt2.step()? == ResultCode::ROW {
let name = stmt2.column_text(0)?;

let foo: String = format!("{:}, NEW.{:}", quote_string(name), quote_identifier(name));
column_names_quoted.push(foo);
}

let json_fragment = column_names_quoted.join(", ");
let json_fragment = json_object_fragment("NEW", &stmt2)?;

return if !local_only && !insert_only {
let trigger = format!("\
@@ -165,8 +156,8 @@ fn powersync_trigger_insert_sql_impl(
THEN RAISE (FAIL, 'id is required')
END;
INSERT INTO {:}
SELECT NEW.id, json_object({:});
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', json_object({:})))));
SELECT NEW.id, {:};
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', {:}))));
INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES({:}, NEW.id);
INSERT OR REPLACE INTO ps_buckets(name, last_op, target_op) VALUES('$local', 0, {:});
END", trigger_name, quoted_name, internal_name, json_fragment, type_string, json_fragment, type_string, MAX_OP_ID);
@@ -178,7 +169,7 @@ fn powersync_trigger_insert_sql_impl(
INSTEAD OF INSERT ON {:}
FOR EACH ROW
BEGIN
INSERT INTO {:} SELECT NEW.id, json_object({:});
INSERT INTO {:} SELECT NEW.id, {:};
END",
trigger_name, quoted_name, internal_name, json_fragment
);
@@ -189,7 +180,7 @@ fn powersync_trigger_insert_sql_impl(
INSTEAD OF INSERT ON {:}
FOR EACH ROW
BEGIN
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', json_object({:})))));
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', {:}))));
END", trigger_name, quoted_name, type_string, json_fragment);
Ok(trigger)
} else {
@@ -224,20 +215,9 @@ fn powersync_trigger_update_sql_impl(
let db = ctx.db_handle();
let stmt2 = db.prepare_v2("select json_extract(e.value, '$.name') as name from json_each(json_extract(?, '$.columns')) e").into_db_result(db)?;
stmt2.bind_text(1, table, sqlite::Destructor::STATIC)?;

let mut column_names_quoted_new: Vec<String> = alloc::vec![];
let mut column_names_quoted_old: Vec<String> = alloc::vec![];
while stmt2.step()? == ResultCode::ROW {
let name = stmt2.column_text(0)?;

let foo_new: String = format!("{:}, NEW.{:}", quote_string(name), quote_identifier(name));
column_names_quoted_new.push(foo_new);
let foo_old: String = format!("{:}, OLD.{:}", quote_string(name), quote_identifier(name));
column_names_quoted_old.push(foo_old);
}

let json_fragment_new = column_names_quoted_new.join(", ");
let json_fragment_old = column_names_quoted_old.join(", ");
let json_fragment_new = json_object_fragment("NEW", &stmt2)?;
stmt2.reset()?;
let json_fragment_old = json_object_fragment("OLD", &stmt2)?;

return if !local_only && !insert_only {
let trigger = format!("\
@@ -250,9 +230,9 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE {:}
SET data = json_object({:})
SET data = {:}
WHERE id = NEW.id;
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff(json_object({:}), json_object({:})))));
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff({:}, {:}))));
INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES({:}, NEW.id);
INSERT OR REPLACE INTO ps_buckets(name, last_op, target_op) VALUES('$local', 0, {:});
END", trigger_name, quoted_name, internal_name, json_fragment_new, type_string, json_fragment_old, json_fragment_new, type_string, MAX_OP_ID);
@@ -269,7 +249,7 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE {:}
SET data = json_object({:})
SET data = {:}
WHERE id = NEW.id;
END",
trigger_name, quoted_name, internal_name, json_fragment_new
@@ -335,3 +315,45 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {

Ok(())
}

/// Given a query returning column names, return a JSON object fragment for a trigger.
///
/// Example output with prefix "NEW": "json_object('id', NEW.id, 'name', NEW.name, 'age', NEW.age)".
///
/// When the column count exceeds the per-call argument limit, the output is
/// split into multiple json_object() calls combined via powersync_json_merge().
fn json_object_fragment(prefix: &str, name_results: &ManagedStmt) -> Result<String, SQLiteError> {
    // floor(SQLITE_MAX_FUNCTION_ARG / 2).
    // To keep databases portable, we use the default limit of 100 args for this,
    // and don't try to query the limit dynamically.
    const MAX_ARG_COUNT: usize = 50;
    // SQLITE_MAX_COLUMN - 1 (because of the id column).
    const MAX_COLUMN_COUNT: usize = 1999;

    let mut column_names_quoted: Vec<String> = alloc::vec![];
    while name_results.step()? == ResultCode::ROW {
        let name = name_results.column_text(0)?;

        // Each entry is a `'name', PREFIX."name"` pair for json_object().
        let quoted: String = format!(
            "{:}, {:}.{:}",
            quote_string(name),
            prefix,
            quote_identifier(name)
        );
        column_names_quoted.push(quoted);
    }

    if column_names_quoted.len() > MAX_COLUMN_COUNT {
        Err(SQLiteError::from(ResultCode::TOOBIG))
    } else if column_names_quoted.len() <= MAX_ARG_COUNT {
        // Small number of columns - use json_object() directly.
        Ok(format!("json_object({:})", column_names_quoted.join(", ")))
    } else {
        // Too many columns to use json_object directly.
        // Instead, we build up the JSON object in chunks,
        // and merge using powersync_json_merge().
        let fragments: Vec<String> = column_names_quoted
            .chunks(MAX_ARG_COUNT)
            .map(|chunk| format!("json_object({:})", chunk.join(", ")))
            .collect();
        Ok(format!("powersync_json_merge({:})", fragments.join(", ")))
    }
}
230 changes: 230 additions & 0 deletions dart/test/crud_test.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,230 @@
import 'dart:convert';

import 'package:sqlite3/common.dart';
import 'package:test/test.dart';

import 'utils/native_test_utils.dart';

void main() {
  group('crud tests', () {
    late CommonDatabase db;

    setUp(() async {
      db = openTestDatabase();
    });

    tearDown(() {
      db.dispose();
    });

    // powersync_diff returns only the keys whose values differ between the
    // two JSON arguments; keys removed in the second argument map to null.
    test('powersync_diff - single value', () {
      var r1 =
          db.select('select powersync_diff(?, ?) as diff', ['{}', '{}']).first;
      expect(r1['diff'], equals('{}'));

      var r2 = db.select(
          'select powersync_diff(?, ?) as diff', ['{}', '{"test":1}']).first;
      expect(r2['diff'], equals('{"test":1}'));

      var r3 = db.select('select powersync_diff(?, ?) as diff',
          ['{"test":1}', '{"test":1}']).first;
      expect(r3['diff'], equals('{}'));

      var r4 = db.select(
          'select powersync_diff(?, ?) as diff', ['{"test":1}', '{}']).first;
      expect(r4['diff'], equals('{"test":null}'));

      var r5 = db.select('select powersync_diff(?, ?) as diff',
          ['{"test":1}', '{"test":null}']).first;
      expect(r5['diff'], equals('{"test":null}'));

      var r6 = db.select('select powersync_diff(?, ?) as diff',
          ['{"test":1}', '{"test":2}']).first;
      expect(r6['diff'], equals('{"test":2}'));
    });

    test('powersync_diff - multiple values', () {
      var r1 = db.select('select powersync_diff(?, ?) as diff',
          ['{"a":1,"b":"test"}', '{}']).first;
      expect(r1['diff'], equals('{"a":null,"b":null}'));

      var r2 = db.select('select powersync_diff(?, ?) as diff',
          ['{}', '{"a":1,"b":"test"}']).first;
      expect(r2['diff'], equals('{"a":1,"b":"test"}'));

      var r3 = db.select('select powersync_diff(?, ?) as diff',
          ['{"a":1,"b":"test"}', '{"a":1,"b":"test"}']).first;
      expect(r3['diff'], equals('{}'));

      var r4 = db.select('select powersync_diff(?, ?) as diff',
          ['{"a":1}', '{"b":"test"}']).first;
      expect(r4['diff'], equals('{"a":null,"b":"test"}'));

      var r5 = db.select('select powersync_diff(?, ?) as diff',
          ['{"a":1}', '{"a":1,"b":"test"}']).first;
      expect(r5['diff'], equals('{"b":"test"}'));
    });

    // Runs a full insert/update/delete cycle against a synced table with
    // [numberOfColumns] TEXT columns, asserting that ps_crud records each
    // operation with the expected op/type/id/data payload.
    var runCrudTest = (int numberOfColumns) {
      var columns = [];
      for (var i = 0; i < numberOfColumns; i++) {
        columns.add({'name': 'column$i', 'type': 'TEXT'});
      }
      var tableSchema = {
        'tables': [
          {'name': 'items', 'columns': columns}
        ]
      };
      db.select('select powersync_init()');

      // 1. Test schema initialization
      db.select(
          'select powersync_replace_schema(?)', [jsonEncode(tableSchema)]);

      var columnNames = columns.map((c) => c['name']).join(', ');
      // Each column's inserted value is its own name, as a string literal.
      var columnValues = columns.map((c) => "'${c['name']}'").join(', ');

      // 2. Test insert
      db.select(
          "insert into items(id, ${columnNames}) values('test_id', ${columnValues})");
      var item = db.select('select * from items').first;
      var expectedData =
          Map.fromEntries(columns.map((c) => MapEntry(c['name'], c['name'])));

      expect(item, equals({'id': 'test_id', ...expectedData}));
      var crud = db.select('select * from ps_crud').first;
      var crudData = jsonDecode(crud['data']);
      expect(crud['tx_id'], equals(1));
      expect(
          crudData,
          equals({
            'op': 'PUT',
            'type': 'items',
            'id': 'test_id',
            'data': expectedData
          }));

      // 3. Test update
      db.select('update items set column0 = ?', ['new_value']);
      var itemUpdated = db.select('select * from items').first;
      expect(itemUpdated,
          equals({'id': 'test_id', ...expectedData, 'column0': 'new_value'}));

      // PATCH entries should only contain the changed column.
      var crudUpdated = db.select('select * from ps_crud where id = 2').first;
      var crudDataUpdated = jsonDecode(crudUpdated['data']);
      expect(crudUpdated['tx_id'], equals(2));
      expect(
          crudDataUpdated,
          equals({
            'op': 'PATCH',
            'type': 'items',
            'id': 'test_id',
            'data': {'column0': 'new_value'}
          }));

      // 4. Test delete
      db.select('delete from items');
      var itemDeleted = db.select('select * from items').firstOrNull;
      expect(itemDeleted, equals(null));

      var crudDeleted = db.select('select * from ps_crud where id = 3').first;
      var crudDataDeleted = jsonDecode(crudDeleted['data']);
      expect(crudDeleted['tx_id'], equals(3));
      expect(crudDataDeleted,
          equals({'op': 'DELETE', 'type': 'items', 'id': 'test_id'}));
    };

    // Same cycle for a local_only table: data round-trips through the view,
    // but no ps_crud entries are expected (no assertions on ps_crud here).
    var runCrudTestLocalOnly = (int numberOfColumns) {
      var columns = [];
      for (var i = 0; i < numberOfColumns; i++) {
        columns.add({'name': 'column$i', 'type': 'TEXT'});
      }
      var tableSchema = {
        'tables': [
          {'name': 'items', 'columns': columns, 'local_only': true}
        ]
      };
      db.select('select powersync_init()');

      // 1. Test schema initialization
      db.select(
          'select powersync_replace_schema(?)', [jsonEncode(tableSchema)]);

      var columnNames = columns.map((c) => c['name']).join(', ');
      var columnValues = columns.map((c) => "'${c['name']}'").join(', ');

      // 2. Test insert
      db.select(
          "insert into items(id, ${columnNames}) values('test_id', ${columnValues})");
      var item = db.select('select * from items').first;
      var expectedData =
          Map.fromEntries(columns.map((c) => MapEntry(c['name'], c['name'])));

      expect(item, equals({'id': 'test_id', ...expectedData}));

      // 3. Test update
      db.select('update items set column0 = ?', ['new_value']);
      var itemUpdated = db.select('select * from items').first;
      expect(itemUpdated,
          equals({'id': 'test_id', ...expectedData, 'column0': 'new_value'}));

      // 4. Test delete
      db.select('delete from items');
      var itemDeleted = db.select('select * from items').firstOrNull;
      expect(itemDeleted, equals(null));
    };

    // insert_only tables record the PUT in ps_crud but keep no local row,
    // so the post-insert select is expected to return nothing.
    var runCrudTestInsertOnly = (int numberOfColumns) {
      var columns = [];
      for (var i = 0; i < numberOfColumns; i++) {
        columns.add({'name': 'column$i', 'type': 'TEXT'});
      }
      var tableSchema = {
        'tables': [
          {'name': 'items', 'columns': columns, 'insert_only': true}
        ]
      };
      db.select('select powersync_init()');

      // 1. Test schema initialization
      db.select(
          'select powersync_replace_schema(?)', [jsonEncode(tableSchema)]);

      var columnNames = columns.map((c) => c['name']).join(', ');
      var columnValues = columns.map((c) => "'${c['name']}'").join(', ');

      // 2. Test insert
      db.select(
          "insert into items(id, ${columnNames}) values('test_id', ${columnValues})");
      var item = db.select('select * from items').firstOrNull;
      expect(item, equals(null));
      var expectedData =
          Map.fromEntries(columns.map((c) => MapEntry(c['name'], c['name'])));

      var crud = db.select('select * from ps_crud').first;
      var crudData = jsonDecode(crud['data']);
      expect(crud['tx_id'], equals(1));
      expect(
          crudData,
          equals({
            'op': 'PUT',
            'type': 'items',
            'id': 'test_id',
            'data': expectedData
          }));
    };

    // Column counts straddle the chunking boundaries in json_object_fragment:
    // 49/50/51 around MAX_ARG_COUNT, 63/64 around chunk joins, and 1999 at
    // the SQLITE_MAX_COLUMN-derived upper limit.
    for (var numberOfColumns in [1, 49, 50, 51, 63, 64, 100, 1999]) {
      test('crud test with $numberOfColumns columns', () async {
        runCrudTest(numberOfColumns);
      });
      test('crud test with $numberOfColumns columns - local_only', () async {
        runCrudTestLocalOnly(numberOfColumns);
      });

      test('crud test with $numberOfColumns columns - insert_only', () async {
        runCrudTestInsertOnly(numberOfColumns);
      });
    }
  });
}