Increase column limit to 1999 #50

Merged 3 commits on Dec 30, 2024

65 changes: 49 additions & 16 deletions crates/core/src/diff.rs
@@ -1,12 +1,11 @@
extern crate alloc;


use alloc::format;
use alloc::string::{String, ToString};
use core::ffi::c_int;
use core::slice;

use sqlite::{ResultCode};
use sqlite::ResultCode;
use sqlite_nostd as sqlite;
use sqlite_nostd::{Connection, Context, Value};

@@ -26,7 +25,6 @@ fn powersync_diff_impl(
}

pub fn diff_objects(data_old: &str, data_new: &str) -> Result<String, SQLiteError> {

let v_new: json::Value = json::from_str(data_new)?;
let v_old: json::Value = json::from_str(data_old)?;

@@ -81,7 +79,6 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
Ok(())
}


#[cfg(test)]
mod tests {
use super::*;
@@ -91,17 +88,53 @@ mod tests {
assert_eq!(diff_objects("{}", "{}").unwrap(), "{}");
assert_eq!(diff_objects(r#"{"a": null}"#, "{}").unwrap(), "{}");
assert_eq!(diff_objects(r#"{}"#, r#"{"a": null}"#).unwrap(), "{}");
assert_eq!(diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 1}"#).unwrap(), "{}");
assert_eq!(diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(), r#"{"b":2}"#);
assert_eq!(diff_objects(r#"{"a": 0, "b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(), r#"{"a":null,"b":2}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": null}"#).unwrap(), r#"{"a":null}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{}"#).unwrap(), r#"{"a":null}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": 2}"#).unwrap(), r#"{"a":2}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": "1"}"#).unwrap(), r#"{"a":"1"}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": 1.0}"#).unwrap(), r#"{"a":1.0}"#);
assert_eq!(diff_objects(r#"{"a": 1.00}"#, r#"{"a": 1.0}"#).unwrap(), r#"{}"#);
assert_eq!(diff_objects(r#"{}"#, r#"{"a": 1.0}"#).unwrap(), r#"{"a":1.0}"#);
assert_eq!(diff_objects(r#"{}"#, r#"{"a": [1,2,3]}"#).unwrap(), r#"{"a":[1,2,3]}"#);
assert_eq!(diff_objects(r#"{"a": 1}"#, r#"{"a": [1,2,3]}"#).unwrap(), r#"{"a":[1,2,3]}"#);
assert_eq!(
diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 1}"#).unwrap(),
"{}"
);
assert_eq!(
diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(),
r#"{"b":2}"#
);
assert_eq!(
diff_objects(r#"{"a": 0, "b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(),
r#"{"a":null,"b":2}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": null}"#).unwrap(),
r#"{"a":null}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{}"#).unwrap(),
r#"{"a":null}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": 2}"#).unwrap(),
r#"{"a":2}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": "1"}"#).unwrap(),
r#"{"a":"1"}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": 1.0}"#).unwrap(),
r#"{"a":1.0}"#
);
assert_eq!(
diff_objects(r#"{"a": 1.00}"#, r#"{"a": 1.0}"#).unwrap(),
r#"{}"#
);
assert_eq!(
diff_objects(r#"{}"#, r#"{"a": 1.0}"#).unwrap(),
r#"{"a":1.0}"#
);
assert_eq!(
diff_objects(r#"{}"#, r#"{"a": [1,2,3]}"#).unwrap(),
r#"{"a":[1,2,3]}"#
);
assert_eq!(
diff_objects(r#"{"a": 1}"#, r#"{"a": [1,2,3]}"#).unwrap(),
r#"{"a":[1,2,3]}"#
);
}
}
69 changes: 69 additions & 0 deletions crates/core/src/json_merge.rs
@@ -0,0 +1,69 @@
extern crate alloc;

use alloc::format;
use alloc::string::{String, ToString};
use core::ffi::c_int;
use core::slice;

use sqlite::ResultCode;
use sqlite_nostd as sqlite;
use sqlite_nostd::{Connection, Context, Value};

use crate::create_sqlite_text_fn;
use crate::error::SQLiteError;

/// Given any number of JSON TEXT arguments, merge them into a single JSON object.
///
/// This assumes each argument is a valid JSON object, with no duplicate keys.
/// No JSON parsing or validation is performed - this performs simple string concatenation.
fn powersync_json_merge_impl(
_ctx: *mut sqlite::context,
args: &[*mut sqlite::value],
) -> Result<String, SQLiteError> {
if args.is_empty() {
return Ok("{}".to_string());
}
let mut result = String::from("{");
for arg in args {
let chunk = arg.text();
if chunk.is_empty() || !chunk.starts_with('{') || !chunk.ends_with('}') {
return Err(SQLiteError::from(ResultCode::MISMATCH));
}

// Strip outer braces
let inner = &chunk[1..(chunk.len() - 1)];

// If this is not the first chunk, insert a comma
if result.len() > 1 {
result.push(',');
}

// Append the inner content
result.push_str(inner);
}

// Close the outer brace
result.push('}');
Ok(result)
}

create_sqlite_text_fn!(
powersync_json_merge,
powersync_json_merge_impl,
"powersync_json_merge"
);

pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
db.create_function_v2(
"powersync_json_merge",
-1,
sqlite::UTF8 | sqlite::DETERMINISTIC,
None,
Some(powersync_json_merge),
None,
None,
None,
)?;

Ok(())
}
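
For reference, here is a minimal standalone sketch of the same concatenation-based merge outside the SQLite wrapper (the function name is illustrative, not part of the extension's API). It mirrors the logic above: check that each argument looks like a JSON object, strip the outer braces, and join the inner fragments with commas, so that in SQL, powersync_json_merge('{"a":1}', '{"b":2}') evaluates to '{"a":1,"b":2}'.

// Standalone illustration of the merge logic above (not the registered
// SQLite function). Assumes each input is a valid, non-empty JSON object
// with no duplicate keys, matching the documented precondition.
fn merge_json_objects(parts: &[&str]) -> Option<String> {
    if parts.is_empty() {
        return Some(String::from("{}"));
    }
    let mut result = String::from("{");
    for &chunk in parts {
        if chunk.is_empty() || !chunk.starts_with('{') || !chunk.ends_with('}') {
            return None; // the extension reports ResultCode::MISMATCH here
        }
        // Strip the outer braces and append the inner key/value pairs.
        let inner = &chunk[1..chunk.len() - 1];
        if result.len() > 1 {
            result.push(',');
        }
        result.push_str(inner);
    }
    result.push('}');
    Some(result)
}

fn main() {
    assert_eq!(
        merge_json_objects(&[r#"{"a":1}"#, r#"{"b":2,"c":3}"#]).unwrap(),
        r#"{"a":1,"b":2,"c":3}"#
    );
}
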
2 changes: 2 additions & 0 deletions crates/core/src/lib.rs
@@ -18,6 +18,7 @@ mod diff;
mod error;
mod ext;
mod fix035;
mod json_merge;
mod kv;
mod macros;
mod migrations;
@@ -55,6 +56,7 @@ fn init_extension(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
crate::views::register(db)?;
crate::uuid::register(db)?;
crate::diff::register(db)?;
crate::json_merge::register(db)?;
crate::view_admin::register(db)?;
crate::checkpoint::register(db)?;
crate::kv::register(db)?;
86 changes: 54 additions & 32 deletions crates/core/src/views.rs
@@ -7,7 +7,7 @@ use core::ffi::c_int;
use core::slice;

use sqlite::{Connection, Context, ResultCode, Value};
use sqlite_nostd as sqlite;
use sqlite_nostd::{self as sqlite, ManagedStmt};

use crate::create_sqlite_text_fn;
use crate::error::{PSResult, SQLiteError};
@@ -143,16 +143,7 @@ fn powersync_trigger_insert_sql_impl(
let local_db = ctx.db_handle();
let stmt2 = local_db.prepare_v2("select json_extract(e.value, '$.name') as name from json_each(json_extract(?, '$.columns')) e")?;
stmt2.bind_text(1, table, sqlite::Destructor::STATIC)?;

let mut column_names_quoted: Vec<String> = alloc::vec![];
while stmt2.step()? == ResultCode::ROW {
let name = stmt2.column_text(0)?;

let foo: String = format!("{:}, NEW.{:}", quote_string(name), quote_identifier(name));
column_names_quoted.push(foo);
}

let json_fragment = column_names_quoted.join(", ");
let json_fragment = json_object_fragment("NEW", &stmt2)?;

return if !local_only && !insert_only {
let trigger = format!("\
@@ -165,8 +156,8 @@
THEN RAISE (FAIL, 'id is required')
END;
INSERT INTO {:}
SELECT NEW.id, json_object({:});
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', json_object({:})))));
SELECT NEW.id, {:};
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', {:}))));
INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES({:}, NEW.id);
INSERT OR REPLACE INTO ps_buckets(name, last_op, target_op) VALUES('$local', 0, {:});
END", trigger_name, quoted_name, internal_name, json_fragment, type_string, json_fragment, type_string, MAX_OP_ID);
@@ -178,7 +169,7 @@
INSTEAD OF INSERT ON {:}
FOR EACH ROW
BEGIN
INSERT INTO {:} SELECT NEW.id, json_object({:});
INSERT INTO {:} SELECT NEW.id, {:};
END",
trigger_name, quoted_name, internal_name, json_fragment
);
@@ -189,7 +180,7 @@
INSTEAD OF INSERT ON {:}
FOR EACH ROW
BEGIN
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', json_object({:})))));
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', {:}))));
END", trigger_name, quoted_name, type_string, json_fragment);
Ok(trigger)
} else {
@@ -224,20 +215,9 @@ fn powersync_trigger_update_sql_impl(
let db = ctx.db_handle();
let stmt2 = db.prepare_v2("select json_extract(e.value, '$.name') as name from json_each(json_extract(?, '$.columns')) e").into_db_result(db)?;
stmt2.bind_text(1, table, sqlite::Destructor::STATIC)?;

let mut column_names_quoted_new: Vec<String> = alloc::vec![];
let mut column_names_quoted_old: Vec<String> = alloc::vec![];
while stmt2.step()? == ResultCode::ROW {
let name = stmt2.column_text(0)?;

let foo_new: String = format!("{:}, NEW.{:}", quote_string(name), quote_identifier(name));
column_names_quoted_new.push(foo_new);
let foo_old: String = format!("{:}, OLD.{:}", quote_string(name), quote_identifier(name));
column_names_quoted_old.push(foo_old);
}

let json_fragment_new = column_names_quoted_new.join(", ");
let json_fragment_old = column_names_quoted_old.join(", ");
let json_fragment_new = json_object_fragment("NEW", &stmt2)?;
stmt2.reset()?;
let json_fragment_old = json_object_fragment("OLD", &stmt2)?;

return if !local_only && !insert_only {
let trigger = format!("\
@@ -250,9 +230,9 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE {:}
SET data = json_object({:})
SET data = {:}
WHERE id = NEW.id;
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff(json_object({:}), json_object({:})))));
INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff({:}, {:}))));
INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES({:}, NEW.id);
INSERT OR REPLACE INTO ps_buckets(name, last_op, target_op) VALUES('$local', 0, {:});
END", trigger_name, quoted_name, internal_name, json_fragment_new, type_string, json_fragment_old, json_fragment_new, type_string, MAX_OP_ID);
@@ -269,7 +249,7 @@ BEGIN
THEN RAISE (FAIL, 'Cannot update id')
END;
UPDATE {:}
SET data = json_object({:})
SET data = {:}
WHERE id = NEW.id;
END",
trigger_name, quoted_name, internal_name, json_fragment_new
@@ -335,3 +315,45 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {

Ok(())
}

/// Given a query returning column names, return a JSON object fragment for a trigger.
///
/// Example output with prefix "NEW": "json_object('id', NEW.id, 'name', NEW.name, 'age', NEW.age)".
fn json_object_fragment(prefix: &str, name_results: &ManagedStmt) -> Result<String, SQLiteError> {
// floor(SQLITE_MAX_FUNCTION_ARG / 2).
// To keep databases portable, we use the default limit of 100 args for this,
// and don't try to query the limit dynamically.
const MAX_ARG_COUNT: usize = 50;

let mut column_names_quoted: Vec<String> = alloc::vec![];
while name_results.step()? == ResultCode::ROW {
let name = name_results.column_text(0)?;

let quoted: String = format!(
"{:}, {:}.{:}",
quote_string(name),
prefix,
quote_identifier(name)
);
column_names_quoted.push(quoted);
}

// SQLITE_MAX_COLUMN - 1 (because of the id column)
if column_names_quoted.len() > 1999 {
return Err(SQLiteError::from(ResultCode::TOOBIG));
} else if column_names_quoted.len() <= MAX_ARG_COUNT {
// Small number of columns - use json_object() directly.
let json_fragment = column_names_quoted.join(", ");
return Ok(format!("json_object({:})", json_fragment));
} else {
// Too many columns to use json_object directly.
// Instead, we build up the JSON object in chunks,
// and merge using powersync_json_merge().
let mut fragments: Vec<String> = alloc::vec![];
for chunk in column_names_quoted.chunks(MAX_ARG_COUNT) {
let sub_fragment = chunk.join(", ");
fragments.push(format!("json_object({:})", sub_fragment));
}
return Ok(format!("powersync_json_merge({:})", fragments.join(", ")));
}
}
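
To make the chunking concrete, here is a hedged standalone sketch of the same strategy operating on a plain slice of column names instead of a prepared statement (the helper name and the simplified quoting are illustrative; the extension uses quote_string()/quote_identifier()). Each column becomes a 'name', PREFIX.name pair, pairs are grouped into json_object() calls of at most 50 pairs, and the groups are combined with powersync_json_merge() once a table exceeds 50 columns.

// Illustrative sketch of the chunking strategy in json_object_fragment,
// over a plain slice of column names. Quoting is simplified here.
const MAX_ARG_COUNT: usize = 50; // floor(SQLITE_MAX_FUNCTION_ARG / 2)
const MAX_COLUMNS: usize = 1999; // SQLITE_MAX_COLUMN - 1 (id column)

fn fragment_for(prefix: &str, columns: &[&str]) -> Option<String> {
    if columns.len() > MAX_COLUMNS {
        return None; // the extension reports ResultCode::TOOBIG here
    }
    let pairs: Vec<String> = columns
        .iter()
        .map(|name| format!("'{}', {}.\"{}\"", name, prefix, name))
        .collect();
    if pairs.len() <= MAX_ARG_COUNT {
        // Small tables: a single json_object() call.
        Some(format!("json_object({})", pairs.join(", ")))
    } else {
        // Large tables: one json_object() per chunk of 50 columns,
        // merged with powersync_json_merge().
        let chunks: Vec<String> = pairs
            .chunks(MAX_ARG_COUNT)
            .map(|chunk| format!("json_object({})", chunk.join(", ")))
            .collect();
        Some(format!("powersync_json_merge({})", chunks.join(", ")))
    }
}

For a 120-column table this yields powersync_json_merge(json_object(<50 pairs>), json_object(<50 pairs>), json_object(<20 pairs>)), which keeps each call under SQLite's default 100-argument limit while allowing up to 1999 data columns.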