Support new JSON #245

Draft · wants to merge 1 commit into master
18 changes: 18 additions & 0 deletions lib/ch/row_binary.ex
@@ -516,6 +516,7 @@ defmodule Ch.RowBinary do
:uuid,
:date,
:date32,
:json,
:ipv4,
:ipv6,
:point,
@@ -697,6 +698,20 @@ defmodule Ch.RowBinary do
defp utf8_size(codepoint) when codepoint <= 0xFFFF, do: 3
defp utf8_size(codepoint) when codepoint <= 0x10FFFF, do: 4

@compile inline: [decode_json_decode_rows: 5]

for {pattern, size} <- varints do
defp decode_json_decode_rows(
<<unquote(pattern), s::size(unquote(size))-bytes, bin::bytes>>,
types_rest,
row,
rows,
types
) do
decode_rows(types_rest, bin, [Jason.decode!(s) | row], rows, types)
end
end

@compile inline: [decode_binary_decode_rows: 5]

for {pattern, size} <- varints do
@@ -865,6 +880,9 @@ defmodule Ch.RowBinary do
<<d::32-little-signed, bin::bytes>> = bin
decode_rows(types_rest, bin, [Date.add(@epoch_date, d) | row], rows, types)

:json ->
decode_json_decode_rows(bin, types_rest, row, rows, types)

{:datetime, timezone} ->
<<s::32-little, bin::bytes>> = bin

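The new decode_json_decode_rows/5 clauses mirror decode_binary_decode_rows/5: they read a varint length prefix, take that many bytes, and run them through Jason.decode!/1, which is why the tests below expect maps rather than raw JSON strings. A minimal standalone sketch of the same idea, assuming the server writes JSON columns as strings (output_format_binary_write_json_as_string = 1) and the payload fits a one-byte varint:

# Hypothetical illustration, not part of the diff: decode a single RowBinary
# value whose length fits in one varint byte (< 128), then JSON-decode it.
defmodule JsonRowBinarySketch do
  def decode_value(<<len, json::size(len)-bytes, rest::bytes>>) when len < 128 do
    {Jason.decode!(json), rest}
  end
end

# JsonRowBinarySketch.decode_value(<<8, "{\"a\":42}", "tail">>)
# #=> {%{"a" => 42}, "tail"}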
1 change: 1 addition & 0 deletions lib/ch/types.ex
@@ -26,6 +26,7 @@ defmodule Ch.Types do
# {"DateTime", :datetime, []},
{"Date32", :date32, []},
{"Date", :date, []},
{"JSON", :json, []},
{"LowCardinality", :low_cardinality, [:type]},
for size <- [32, 64, 128, 256] do
{"Decimal#{size}", :"decimal#{size}", [:int]}
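With this entry the server-side type name JSON is recognized and mapped to the :json atom that the RowBinary decoder above dispatches on. A rough stand-in for that lookup, kept hypothetical and much simpler than Ch.Types' actual parser:

# Simplified stand-in for the name-to-atom lookup this table feeds; the real
# parser also handles parameterized types such as Decimal64(4) and
# LowCardinality(String), which a plain map cannot express.
defmodule TypeNameSketch do
  @plain %{"JSON" => :json, "Date" => :date, "Date32" => :date32, "UUID" => :uuid}

  def to_atom(name) when is_binary(name), do: Map.fetch!(@plain, name)
end

# TypeNameSketch.to_atom("JSON") #=> :json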
90 changes: 84 additions & 6 deletions test/ch/connection_test.exs
@@ -568,20 +568,98 @@
}} = Ch.query(conn, "SELECT * FROM t_uuid ORDER BY y")
end

test "read json as string", %{conn: conn} do

assert Ch.query!(conn, ~s|select '{"a":42}'::JSON|, [],
settings: [
enable_json_type: 1,
output_format_binary_write_json_as_string: 1
]
).rows == [[%{"a" => "42"}]]
end

@tag :skip
test "read json with invalid utf8 string", %{conn: conn} do
assert Ch.query!(
conn,
~s|select map('a', {bin:String})::JSON|,
%{"bin" => "\x61\xF0\x80\x80\x80b"},
settings: [
enable_json_type: 1,
output_format_binary_write_json_as_string: 1
]
).rows == [[%{"a" => "a����b"}]]
end

test "write->read json as string", %{conn: conn} do
Ch.query!(conn, "CREATE TABLE test_write_json(json JSON) ENGINE = Memory", [],
settings: [
enable_json_type: 1
]
)

rowbinary =
Ch.RowBinary.encode_rows(
[
[Jason.encode_to_iodata!(%{"a" => 42})],
[Jason.encode_to_iodata!(%{"b" => 10})]
],
_types = [:string]
)

Ch.query!(conn, ["insert into test_write_json(json) format RowBinary\n" | rowbinary], [],
settings: [
enable_json_type: 1,
input_format_binary_read_json_as_string: 1
]
)

assert Ch.query!(conn, "select json from test_write_json", [],
settings: [
enable_json_type: 1,
output_format_binary_write_json_as_string: 1
]
).rows ==
[[%{"a" => "42"}], [%{"b" => "10"}]]
end

# https://clickhouse.com/docs/en/sql-reference/data-types/newjson
# https://clickhouse.com/docs/en/integrations/data-formats/json/overview
# https://clickhouse.com/blog/a-new-powerful-json-data-type-for-clickhouse
# https://clickhouse.com/blog/json-bench-clickhouse-vs-mongodb-elasticsearch-duckdb-postgresql
# https://github.com/ClickHouse/ClickHouse/pull/70288
# https://github.com/ClickHouse/ClickHouse/blob/master/src/Core/TypeId.h
@tag :skip
test "json", %{conn: conn} do
settings = [allow_experimental_object_type: 1]
settings = [enable_json_type: 1]

Ch.query!(conn, "CREATE TABLE json(o JSON) ENGINE = Memory", [], settings: settings)
assert Ch.query!(
conn,
~s|select '{"a":42,"b":10}'::JSON|,
[],
settings: settings,
decode: false,
format: "RowBinary"
).rows == [
<<2, 1, 97, 10, 42, 0, 0, 0, 0, 0, 0, 0, 1, 98, 10, 10, 0, 0, 0, 0, 0, 0, 0>>
]

# Ch.query!(conn, "CREATE TABLE test_json(json JSON) ENGINE = Memory", [], settings: settings)

Ch.query!(conn, ~s|INSERT INTO json VALUES ('{"a": 1, "b": { "c": 2, "d": [1, 2, 3] }}')|)
# Ch.query!(
# conn,
# ~s|INSERT INTO test_json VALUES ('{"a" : {"b" : 42}, "c" : [1, 2, 3]}'), ('{"f" : "Hello, World!"}'), ('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}')|
# )

assert Ch.query!(conn, "SELECT o.a, o.b.c, o.b.d[3] FROM json").rows == [[1, 2, 3]]
# assert Ch.query!(conn, "SELECT json FROM test_json") == :asdf

# named tuples are not supported yet
assert_raise ArgumentError, fn -> Ch.query!(conn, "SELECT o FROM json") end
# assert Ch.query!(conn, "SELECT json.a, json.b.c, json.b.d[3] FROM test_json").rows == [
# [1, 2, 3]
# ]
end

# TODO variant (is there?)
# TODO dynamic

# TODO enum16

test "enum8", %{conn: conn} do
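Taken together, these tests pin down the client-side contract of this draft: the new JSON type round-trips through RowBinary only when the server is told to read and write it as strings, and decoded leaf values come back as strings ("42" rather than 42). A minimal usage sketch under those settings, mirroring the first test above:

# Usage sketch, assuming a reachable ClickHouse server with the JSON type enabled.
{:ok, conn} =
  Ch.start_link(
    settings: [
      enable_json_type: 1,
      input_format_binary_read_json_as_string: 1,
      output_format_binary_write_json_as_string: 1
    ]
  )

%Ch.Result{rows: [[%{"a" => "42"}]]} =
  Ch.query!(conn, ~s|select '{"a":42}'::JSON|)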
86 changes: 86 additions & 0 deletions test/ch/json_test.exs
@@ -0,0 +1,86 @@
defmodule Ch.JSONTest do
use ExUnit.Case

setup do
conn =
start_supervised!(
{Ch,
database: Ch.Test.database(),
settings: [
enable_json_type: 1,
input_format_binary_read_json_as_string: 1,
output_format_binary_write_json_as_string: 1
]}
)

{:ok, conn: conn}
end

# https://clickhouse.com/docs/en/sql-reference/data-types/newjson#creating-json
test "Creating JSON", %{conn: conn} do
Ch.query!(conn, "CREATE TABLE test (json JSON) ENGINE = Memory")
on_exit(fn -> Ch.Test.sql_exec("DROP TABLE test") end)

Ch.query!(conn, """
INSERT INTO test VALUES
('{"a" : {"b" : 42}, "c" : [1, 2, 3]}'),
('{"f" : "Hello, World!"}'),
('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}')
""")

assert Ch.query!(conn, "SELECT json FROM test").rows == [
[%{"a" => %{"b" => "42"}, "c" => ["1", "2", "3"]}],
[%{"f" => "Hello, World!"}],
[%{"a" => %{"b" => "43", "e" => "10"}, "c" => ["4", "5", "6"]}]
]
end

@tag :skip
test "Creating JSON (explicit types and SKIP)", %{conn: conn} do
Ch.query!(conn, "CREATE TABLE test (json JSON(a.b UInt32, SKIP a.e)) ENGINE = Memory")
on_exit(fn -> Ch.Test.sql_exec("DROP TABLE test") end)

Ch.query!(conn, """
INSERT INTO test VALUES
('{"a" : {"b" : 42}, "c" : [1, 2, 3]}'),
('{"f" : "Hello, World!"}'),
('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}')
""")

assert Ch.query!(conn, "SELECT json FROM test").rows == []
end

test "Creating JSON using CAST from String", %{conn: conn} do

assert Ch.query!(conn, """
SELECT '{"a" : {"b" : 42},"c" : [1, 2, 3], "d" : "Hello, World!"}'::JSON AS json
""").rows == [
[%{"a" => %{"b" => "42"}, "c" => ["1", "2", "3"], "d" => "Hello, World!"}]
]
end

test "Creating JSON using CAST from Tuple", %{conn: conn} do

assert Ch.query!(
conn,
"""
SELECT (tuple(42 AS b) AS a, [1, 2, 3] AS c, 'Hello, World!' AS d)::JSON AS json
""",
[],
settings: [enable_named_columns_in_function_tuple: 1]
).rows == [[%{"a" => %{"b" => "42"}, "c" => ["1", "2", "3"], "d" => "Hello, World!"}]]
end

test "Creating JSON using CAST from Map", %{conn: conn} do

assert Ch.query!(
conn,
"""
SELECT map('a', map('b', 42), 'c', [1,2,3], 'd', 'Hello, World!')::JSON AS json;
""",
[],
settings: [enable_variant_type: 1, use_variant_as_common_type: 1]
).rows == [[%{"a" => %{"b" => "42"}, "c" => ["1", "2", "3"], "d" => "Hello, World!"}]]
end

# https://clickhouse.com/docs/en/sql-reference/data-types/newjson#reading-json-paths-as-subcolumns
test "Reading JSON paths as subcolumns", %{conn: _conn} do
end
end