
Commit

eh?
ruslandoga committed Feb 9, 2025
1 parent 1874e71 commit 29941c6
Showing 2 changed files with 58 additions and 1 deletion.
16 changes: 15 additions & 1 deletion lib/ch/row_binary.ex
@@ -698,6 +698,20 @@ defmodule Ch.RowBinary do
  defp utf8_size(codepoint) when codepoint <= 0xFFFF, do: 3
  defp utf8_size(codepoint) when codepoint <= 0x10FFFF, do: 4

  @compile inline: [decode_json_decode_rows: 5]

  for {pattern, size} <- varints do
    defp decode_json_decode_rows(
           <<unquote(pattern), s::size(unquote(size))-bytes, bin::bytes>>,
           types_rest,
           row,
           rows,
           types
         ) do
      decode_rows(types_rest, bin, [:json.decode(s) | row], rows, types)
    end
  end

[GitHub Actions annotation on line 711 of lib/ch/row_binary.ex: the test jobs for Elixir 1.14/1.15 on OTP 25/26 warn that :json.decode/1 is undefined (module :json is not available or is yet to be defined).]

  @compile inline: [decode_binary_decode_rows: 5]

  for {pattern, size} <- varints do
@@ -867,7 +881,7 @@ defmodule Ch.RowBinary do
        decode_rows(types_rest, bin, [Date.add(@epoch_date, d) | row], rows, types)

      :json ->
-        raise ArgumentError, "JSON type is not supported for decoding: #{inspect(bin)}"
+        decode_json_decode_rows(bin, types_rest, row, rows, types)

      {:datetime, timezone} ->
        <<s::32-little, bin::bytes>> = bin
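
The new decode path relies on :json.decode/1 from the :json module that ships with the Erlang standard library starting in OTP 27, which is why the older CI jobs above report it as undefined. A minimal sketch of what that call produces, assuming OTP 27+ (the literal below is illustrative; ClickHouse sends the JSON column as a length-prefixed string when output_format_binary_write_json_as_string is enabled):

    json_string = ~S|{"a":42}|
    :json.decode(json_string)
    #=> %{"a" => 42}
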
43 changes: 43 additions & 0 deletions test/ch/connection_test.exs
@@ -568,12 +568,55 @@ defmodule Ch.ConnectionTest do
            }} = Ch.query(conn, "SELECT * FROM t_uuid ORDER BY y")
  end

[GitHub Actions annotation on line 572 of test/ch/connection_test.exs: check failure in job "test (1.17.1, 27, 24.12.2.29, UTC)": test types read json as string (Ch.ConnectionTest).]

  # TODO non utf8
  test "read json as string", %{conn: conn} do
    assert Ch.query!(conn, ~s|select '{"a":42}'::JSON|, [],
             settings: [
               enable_json_type: 1,
               output_format_binary_write_json_as_string: 1
             ]
           ).rows == [[%{"a" => "42"}]]
  end

test "write->read json as string", %{conn: conn} do
Ch.query!(conn, "CREATE TABLE test_write_json(json JSON) ENGINE = Memory", [],
settings: [
enable_json_type: 1
]
)

rowbinary =
Ch.RowBinary.encode_rows(
[
[:json.encode(%{"a" => 42})],
[:json.encode(%{"b" => 10})]
],
_types = [:string]
)

Ch.query!(conn, ["insert into test_write_json(json) format RowBinary\n" | rowbinary], [],
settings: [
enable_json_type: 1,
input_format_binary_read_json_as_string: 1
]
)

assert Ch.query!(conn, "select json from test_write_json", [],
settings: [
enable_json_type: 1,
output_format_binary_write_json_as_string: 1
]
).rows ==
[[%{"a" => "42"}], [%{"b" => "10"}]]
end
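
The round trip above leans on :json.encode/1, the companion to :json.decode/1 in OTP 27's :json module: each row is encoded as a plain JSON string, written into the JSON column with input_format_binary_read_json_as_string, and read back with output_format_binary_write_json_as_string. A small sketch of the encode half, assuming OTP 27+ (IO.iodata_to_binary/1 is used here only to make the iodata result readable):

    IO.iodata_to_binary(:json.encode(%{"a" => 42}))
    #=> ~S|{"a":42}|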

  # https://clickhouse.com/docs/en/sql-reference/data-types/newjson
  # https://clickhouse.com/docs/en/integrations/data-formats/json/overview
  # https://clickhouse.com/blog/a-new-powerful-json-data-type-for-clickhouse
  # https://clickhouse.com/blog/json-bench-clickhouse-vs-mongodb-elasticsearch-duckdb-postgresql
  # https://github.com/ClickHouse/ClickHouse/pull/70288
  # https://github.com/ClickHouse/ClickHouse/blob/master/src/Core/TypeId.h
  @tag :skip
  test "json", %{conn: conn} do
    settings = [enable_json_type: 1]

