diff --git a/.typos.toml b/.typos.toml
index c98a52a..1b41395 100644
--- a/.typos.toml
+++ b/.typos.toml
@@ -1,3 +1,4 @@
[default.extend-words]
"som" = "som" # ./test/ch/ecto_type_test.exs
"ECT" = "ECT" # ./test/ch/query_test.exs
+"Evn" = "Evn" # ./CHANGELOG.md
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1c7406c..fc45c91 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,50 @@
# Changelog
+## Unreleased
+
+- added support for `multipart/form-data` in queries: https://github.com/plausible/ch/pull/290 -- which allows bypassing URL length limits sometimes imposed by reverse proxies when sending queries with many parameters.
+
+ ⚠️ This is currently **opt-in** per query ⚠️
+
+ Global support for the entire connection pool is planned for a future release.
+
+ **Usage**
+
+ Pass `multipart: true` in the options list for `Ch.query/4`.
+
+ ```elixir
+ # Example usage
+ Ch.query(pool, "SELECT {a:String}, {b:String}", %{"a" => "A", "b" => "B"}, multipart: true)
+ ```
+
+
+ View raw request format reference
+
+ ```http
+ POST / HTTP/1.1
+ content-length: 387
+ host: localhost:8123
+ user-agent: ch/0.6.2-dev
+ x-clickhouse-format: RowBinaryWithNamesAndTypes
+ content-type: multipart/form-data; boundary="ChFormBoundaryZZlfchKTcd8ToWjEvn66i3lAxNJ_T9dw"
+
+ --ChFormBoundaryZZlfchKTcd8ToWjEvn66i3lAxNJ_T9dw
+ content-disposition: form-data; name="param_a"
+
+ A
+ --ChFormBoundaryZZlfchKTcd8ToWjEvn66i3lAxNJ_T9dw
+ content-disposition: form-data; name="param_b"
+
+ B
+ --ChFormBoundaryZZlfchKTcd8ToWjEvn66i3lAxNJ_T9dw
+ content-disposition: form-data; name="query"
+
+ select {a:String}, {b:String}
+ --ChFormBoundaryZZlfchKTcd8ToWjEvn66i3lAxNJ_T9dw--
+ ```
+
+
+
## 0.6.1 (2025-12-04)
- handle disconnect during stream https://github.com/plausible/ch/pull/283
diff --git a/README.md b/README.md
index b6cb6dd..2b773b4 100644
--- a/README.md
+++ b/README.md
@@ -67,6 +67,24 @@ Note on datetime encoding in query parameters:
- `%NaiveDateTime{}` is encoded as text to make it assume the column's or ClickHouse server's timezone
- `%DateTime{}` is encoded as unix timestamp and is treated as UTC timestamp by ClickHouse
+#### Select rows (lots of params, reverse proxy)
+
+For queries with many parameters the resulting URL can become too long for some reverse proxies, leading to a `414 Request-URI Too Large` error.
+
+To avoid this, you can use the `multipart: true` option to send the query and parameters in the request body.
+
+```elixir
+{:ok, pid} = Ch.start_link()
+
+# Moves parameters from the URL to a multipart/form-data body
+%Ch.Result{rows: [[[1, 2, 3 | _rest]]]} =
+ Ch.query!(pid, "SELECT {ids:Array(UInt64)}", %{"ids" => Enum.to_list(1..10_000)}, multipart: true)
+```
+
+> [!NOTE]
+>
+> `multipart: true` is currently required on each individual query. Support for pool-wide configuration is planned for a future release.
+
#### Insert rows
```elixir
diff --git a/lib/ch.ex b/lib/ch.ex
index 8b12e2c..989a991 100644
--- a/lib/ch.ex
+++ b/lib/ch.ex
@@ -59,6 +59,7 @@ defmodule Ch do
# TODO remove
| {:encode, boolean}
| {:decode, boolean}
+ | {:multipart, boolean}
| DBConnection.connection_option()
@doc """
@@ -76,6 +77,7 @@ defmodule Ch do
* `:headers` - Custom HTTP headers for the request
* `:format` - Custom response format for the request
* `:decode` - Whether to automatically decode the response
+ * `:multipart` - Whether to send the query as multipart/form-data
* [`DBConnection.connection_option()`](https://hexdocs.pm/db_connection/DBConnection.html#t:connection_option/0)
"""
diff --git a/lib/ch/query.ex b/lib/ch/query.ex
index ac19e81..d6d077a 100644
--- a/lib/ch/query.ex
+++ b/lib/ch/query.ex
@@ -1,8 +1,14 @@
defmodule Ch.Query do
@moduledoc "Query struct wrapping the SQL statement."
- defstruct [:statement, :command, :encode, :decode]
+ defstruct [:statement, :command, :encode, :decode, :multipart]
- @type t :: %__MODULE__{statement: iodata, command: command, encode: boolean, decode: boolean}
+ @type t :: %__MODULE__{
+ statement: iodata,
+ command: command,
+ encode: boolean,
+ decode: boolean,
+ multipart: boolean
+ }
@doc false
@spec build(iodata, [Ch.query_option()]) :: t
@@ -10,7 +16,15 @@ defmodule Ch.Query do
command = Keyword.get(opts, :command) || extract_command(statement)
encode = Keyword.get(opts, :encode, true)
decode = Keyword.get(opts, :decode, true)
- %__MODULE__{statement: statement, command: command, encode: encode, decode: decode}
+ multipart = Keyword.get(opts, :multipart, false)
+
+ %__MODULE__{
+ statement: statement,
+ command: command,
+ encode: encode,
+ decode: decode,
+ multipart: multipart
+ }
end
statements = [
@@ -72,6 +86,7 @@ defmodule Ch.Query do
end
defimpl DBConnection.Query, for: Ch.Query do
+ @dialyzer :no_improper_lists
alias Ch.{Query, Result, RowBinary}
@spec parse(Query.t(), [Ch.query_option()]) :: Query.t()
@@ -128,6 +143,22 @@ defimpl DBConnection.Query, for: Ch.Query do
end
end
+ def encode(%Query{multipart: true, statement: statement}, params, opts) do
+ types = Keyword.get(opts, :types)
+ default_format = if types, do: "RowBinary", else: "RowBinaryWithNamesAndTypes"
+ format = Keyword.get(opts, :format) || default_format
+
+ boundary = "ChFormBoundary" <> Base.url_encode64(:crypto.strong_rand_bytes(24))
+ content_type = "multipart/form-data; boundary=\"#{boundary}\""
+ enc_boundary = "--#{boundary}\r\n"
+ multipart = multipart_params(params, enc_boundary)
+ multipart = add_multipart_part(multipart, "query", statement, enc_boundary)
+ multipart = [multipart | "--#{boundary}--\r\n"]
+
+ {_no_query_params = [],
+ [{"x-clickhouse-format", format}, {"content-type", content_type} | headers(opts)], multipart}
+ end
+
def encode(%Query{statement: statement}, params, opts) do
types = Keyword.get(opts, :types)
default_format = if types, do: "RowBinary", else: "RowBinaryWithNamesAndTypes"
@@ -135,6 +166,59 @@ defimpl DBConnection.Query, for: Ch.Query do
{query_params(params), [{"x-clickhouse-format", format} | headers(opts)], statement}
end
+ defp multipart_params(params, boundary) when is_map(params) do
+ multipart_named_params(Map.to_list(params), boundary, [])
+ end
+
+ defp multipart_params(params, boundary) when is_list(params) do
+ multipart_positional_params(params, 0, boundary, [])
+ end
+
+ defp multipart_named_params([{name, value} | params], boundary, acc) do
+ acc =
+ add_multipart_part(
+ acc,
+ "param_" <> URI.encode_www_form(name),
+ encode_param(value),
+ boundary
+ )
+
+ multipart_named_params(params, boundary, acc)
+ end
+
+ defp multipart_named_params([], _boundary, acc), do: acc
+
+ defp multipart_positional_params([value | params], idx, boundary, acc) do
+ acc =
+ add_multipart_part(
+ acc,
+ "param_$" <> Integer.to_string(idx),
+ encode_param(value),
+ boundary
+ )
+
+ multipart_positional_params(params, idx + 1, boundary, acc)
+ end
+
+ defp multipart_positional_params([], _idx, _boundary, acc), do: acc
+
+ @compile inline: [add_multipart_part: 4]
+ defp add_multipart_part(multipart, name, value, boundary) do
+ part = [
+ boundary,
+ "content-disposition: form-data; name=\"",
+ name,
+ "\"\r\n\r\n",
+ value,
+ "\r\n"
+ ]
+
+ case multipart do
+ [] -> part
+ _ -> [multipart | part]
+ end
+ end
+
defp format_row_binary?(statement) when is_binary(statement) do
statement |> String.trim_trailing() |> String.ends_with?("RowBinary")
end
diff --git a/test/ch/aggregation_test.exs b/test/ch/aggregation_test.exs
index 37bd8a3..651622c 100644
--- a/test/ch/aggregation_test.exs
+++ b/test/ch/aggregation_test.exs
@@ -1,5 +1,5 @@
defmodule Ch.AggregationTest do
- use ExUnit.Case
+ use ExUnit.Case, async: true
setup do
conn = start_supervised!({Ch, database: Ch.Test.database()})
diff --git a/test/ch/connection_test.exs b/test/ch/connection_test.exs
index 29a0265..b7d1b45 100644
--- a/test/ch/connection_test.exs
+++ b/test/ch/connection_test.exs
@@ -1,62 +1,78 @@
defmodule Ch.ConnectionTest do
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
+
+ import Ch.Test,
+ only: [
+ parameterize_query: 2,
+ parameterize_query: 3,
+ parameterize_query: 4,
+ parameterize_query!: 2,
+ parameterize_query!: 3,
+ parameterize_query!: 4
+ ]
+
alias Ch.RowBinary
setup do
{:ok, conn: start_supervised!({Ch, database: Ch.Test.database()})}
end
- test "select without params", %{conn: conn} do
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select 1")
+ test "select without params", ctx do
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select 1")
end
- test "select with types", %{conn: conn} do
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select 1", [], types: ["UInt8"])
+ test "select with types", ctx do
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select 1", [], types: ["UInt8"])
end
- test "select with params", %{conn: conn} do
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:UInt8}", %{"a" => 1})
+ test "select with params", ctx do
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:UInt8}", %{"a" => 1})
assert {:ok, %{num_rows: 1, rows: [[true]]}} =
- Ch.query(conn, "select {b:Bool}", %{"b" => true})
+ parameterize_query(ctx, "select {b:Bool}", %{"b" => true})
assert {:ok, %{num_rows: 1, rows: [[false]]}} =
- Ch.query(conn, "select {b:Bool}", %{"b" => false})
+ parameterize_query(ctx, "select {b:Bool}", %{"b" => false})
assert {:ok, %{num_rows: 1, rows: [[nil]]}} =
- Ch.query(conn, "select {n:Nullable(Nothing)}", %{"n" => nil})
+ parameterize_query(ctx, "select {n:Nullable(Nothing)}", %{"n" => nil})
assert {:ok, %{num_rows: 1, rows: [[1.0]]}} =
- Ch.query(conn, "select {a:Float32}", %{"a" => 1.0})
+ parameterize_query(ctx, "select {a:Float32}", %{"a" => 1.0})
assert {:ok, %{num_rows: 1, rows: [["a&b=c"]]}} =
- Ch.query(conn, "select {a:String}", %{"a" => "a&b=c"})
+ parameterize_query(ctx, "select {a:String}", %{"a" => "a&b=c"})
assert {:ok, %{num_rows: 1, rows: [["a\n"]]}} =
- Ch.query(conn, "select {a:String}", %{"a" => "a\n"})
+ parameterize_query(ctx, "select {a:String}", %{"a" => "a\n"})
assert {:ok, %{num_rows: 1, rows: [["a\t"]]}} =
- Ch.query(conn, "select {a:String}", %{"a" => "a\t"})
+ parameterize_query(ctx, "select {a:String}", %{"a" => "a\t"})
assert {:ok, %{num_rows: 1, rows: [[["a\tb"]]]}} =
- Ch.query(conn, "select {a:Array(String)}", %{"a" => ["a\tb"]})
+ parameterize_query(ctx, "select {a:Array(String)}", %{"a" => ["a\tb"]})
assert {:ok, %{num_rows: 1, rows: [[[true, false]]]}} =
- Ch.query(conn, "select {a:Array(Bool)}", %{"a" => [true, false]})
+ parameterize_query(ctx, "select {a:Array(Bool)}", %{"a" => [true, false]})
assert {:ok, %{num_rows: 1, rows: [[["a", nil, "b"]]]}} =
- Ch.query(conn, "select {a:Array(Nullable(String))}", %{"a" => ["a", nil, "b"]})
+ parameterize_query(ctx, "select {a:Array(Nullable(String))}", %{
+ "a" => ["a", nil, "b"]
+ })
assert {:ok, %{num_rows: 1, rows: [row]}} =
- Ch.query(conn, "select {a:Decimal(9,4)}", %{"a" => Decimal.new("2000.333")})
+ parameterize_query(ctx, "select {a:Decimal(9,4)}", %{"a" => Decimal.new("2000.333")})
assert row == [Decimal.new("2000.3330")]
assert {:ok, %{num_rows: 1, rows: [[~D[2022-01-01]]]}} =
- Ch.query(conn, "select {a:Date}", %{"a" => ~D[2022-01-01]})
+ parameterize_query(ctx, "select {a:Date}", %{"a" => ~D[2022-01-01]})
assert {:ok, %{num_rows: 1, rows: [[~D[2022-01-01]]]}} =
- Ch.query(conn, "select {a:Date32}", %{"a" => ~D[2022-01-01]})
+ parameterize_query(ctx, "select {a:Date32}", %{"a" => ~D[2022-01-01]})
naive_noon = ~N[2022-01-01 12:00:00]
@@ -64,7 +80,7 @@ defmodule Ch.ConnectionTest do
# see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
# https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
assert {:ok, %{num_rows: 1, rows: [[naive_datetime]], headers: headers}} =
- Ch.query(conn, "select {naive:DateTime}", %{"naive" => naive_noon})
+ parameterize_query(ctx, "select {naive:DateTime}", %{"naive" => naive_noon})
# to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
# i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
@@ -78,17 +94,17 @@ defmodule Ch.ConnectionTest do
# when the timezone information is provided in the type, we don't need to rely on server timezone
assert {:ok, %{num_rows: 1, rows: [[bkk_datetime]]}} =
- Ch.query(conn, "select {$0:DateTime('Asia/Bangkok')}", [naive_noon])
+ parameterize_query(ctx, "select {$0:DateTime('Asia/Bangkok')}", [naive_noon])
assert bkk_datetime == DateTime.from_naive!(naive_noon, "Asia/Bangkok")
assert {:ok, %{num_rows: 1, rows: [[~U[2022-01-01 12:00:00Z]]]}} =
- Ch.query(conn, "select {$0:DateTime('UTC')}", [naive_noon])
+ parameterize_query(ctx, "select {$0:DateTime('UTC')}", [naive_noon])
naive_noon_ms = ~N[2022-01-01 12:00:00.123]
assert {:ok, %{num_rows: 1, rows: [[naive_datetime]]}} =
- Ch.query(conn, "select {$0:DateTime64(3)}", [naive_noon_ms])
+ parameterize_query(ctx, "select {$0:DateTime64(3)}", [naive_noon_ms])
assert NaiveDateTime.compare(
naive_datetime,
@@ -99,66 +115,70 @@ defmodule Ch.ConnectionTest do
) == :eq
assert {:ok, %{num_rows: 1, rows: [[["a", "b'", "\\'c"]]]}} =
- Ch.query(conn, "select {a:Array(String)}", %{"a" => ["a", "b'", "\\'c"]})
+ parameterize_query(ctx, "select {a:Array(String)}", %{"a" => ["a", "b'", "\\'c"]})
assert {:ok, %{num_rows: 1, rows: [[["a\n", "b\tc"]]]}} =
- Ch.query(conn, "select {a:Array(String)}", %{"a" => ["a\n", "b\tc"]})
+ parameterize_query(ctx, "select {a:Array(String)}", %{"a" => ["a\n", "b\tc"]})
assert {:ok, %{num_rows: 1, rows: [[[1, 2, 3]]]}} =
- Ch.query(conn, "select {a:Array(UInt8)}", %{"a" => [1, 2, 3]})
+ parameterize_query(ctx, "select {a:Array(UInt8)}", %{"a" => [1, 2, 3]})
assert {:ok, %{num_rows: 1, rows: [[[[1], [2, 3], []]]]}} =
- Ch.query(conn, "select {a:Array(Array(UInt8))}", %{"a" => [[1], [2, 3], []]})
+ parameterize_query(ctx, "select {a:Array(Array(UInt8))}", %{"a" => [[1], [2, 3], []]})
uuid = "9B29BD20-924C-4DE5-BDB3-8C2AA1FCE1FC"
uuid_bin = uuid |> String.replace("-", "") |> Base.decode16!()
assert {:ok, %{num_rows: 1, rows: [[^uuid_bin]]}} =
- Ch.query(conn, "select {a:UUID}", %{"a" => uuid})
+ parameterize_query(ctx, "select {a:UUID}", %{"a" => uuid})
# TODO
# assert {:ok, %{num_rows: 1, rows: [[^uuid_bin]]}} =
- # Ch.query(conn, "select {a:UUID}", %{"a" => uuid_bin})
+ # parameterize_query(ctx, "select {a:UUID}", %{"a" => uuid_bin})
# pseudo-positional bind
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {$0:UInt8}", [1])
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} = parameterize_query(ctx, "select {$0:UInt8}", [1])
end
- test "utc datetime query param encoding", %{conn: conn} do
+ test "utc datetime query param encoding", ctx do
utc = ~U[2021-01-01 12:00:00Z]
msk = DateTime.new!(~D[2021-01-01], ~T[15:00:00], "Europe/Moscow")
- naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(conn)) |> DateTime.to_naive()
+ naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(ctx.conn)) |> DateTime.to_naive()
- assert Ch.query!(conn, "select {$0:DateTime} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime} as d, toString(d)", [utc]).rows ==
[[~N[2021-01-01 12:00:00], to_string(naive)]]
- assert Ch.query!(conn, "select {$0:DateTime('UTC')} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime('UTC')} as d, toString(d)", [utc]).rows ==
[[utc, "2021-01-01 12:00:00"]]
- assert Ch.query!(conn, "select {$0:DateTime('Europe/Moscow')} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime('Europe/Moscow')} as d, toString(d)", [
+ utc
+ ]).rows ==
[[msk, "2021-01-01 15:00:00"]]
end
- test "non-utc datetime query param encoding", %{conn: conn} do
+ test "non-utc datetime query param encoding", ctx do
jp = DateTime.shift_zone!(~U[2021-01-01 12:34:56Z], "Asia/Tokyo")
assert inspect(jp) == "#DateTime<2021-01-01 21:34:56+09:00 JST Asia/Tokyo>"
assert [[utc, jp]] =
- Ch.query!(conn, "select {$0:DateTime('UTC')}, {$0:DateTime('Asia/Tokyo')}", [jp]).rows
+ parameterize_query!(
+ ctx,
+ "select {$0:DateTime('UTC')}, {$0:DateTime('Asia/Tokyo')}",
+ [jp]
+ ).rows
assert inspect(utc) == "~U[2021-01-01 12:34:56Z]"
assert inspect(jp) == "#DateTime<2021-01-01 21:34:56+09:00 JST Asia/Tokyo>"
end
- test "non-utc datetime rowbinary encoding", %{conn: conn} do
- Ch.query!(
- conn,
+ test "non-utc datetime rowbinary encoding", ctx do
+ parameterize_query!(
+ ctx,
"create table ch_non_utc_datetimes(name String, datetime DateTime) engine Memory"
)
- on_exit(fn ->
- Ch.Test.query("drop table ch_non_utc_datetimes", [], database: Ch.Test.database())
- end)
+ on_exit(fn -> Ch.Test.query("drop table ch_non_utc_datetimes") end)
utc = ~U[2024-12-21 05:35:19.886393Z]
@@ -168,13 +188,18 @@ defmodule Ch.ConnectionTest do
rows = [["taipei", taipei], ["tokyo", tokyo], ["vienna", vienna]]
- Ch.query!(conn, "insert into ch_non_utc_datetimes(name, datetime) format RowBinary", rows,
+ parameterize_query!(
+ ctx,
+ "insert into ch_non_utc_datetimes(name, datetime) format RowBinary",
+ rows,
types: ["String", "DateTime"]
)
result =
- conn
- |> Ch.query!("select name, cast(datetime as DateTime('UTC')) from ch_non_utc_datetimes")
+ parameterize_query!(
+ ctx,
+ "select name, cast(datetime as DateTime('UTC')) from ch_non_utc_datetimes"
+ )
|> Map.fetch!(:rows)
|> Map.new(fn [name, datetime] -> {name, datetime} end)
@@ -183,57 +208,67 @@ defmodule Ch.ConnectionTest do
assert result["vienna"] == ~U[2024-12-21 05:35:19Z]
end
- test "utc datetime64 query param encoding", %{conn: conn} do
+ test "utc datetime64 query param encoding", ctx do
utc = ~U[2021-01-01 12:00:00.123456Z]
msk = DateTime.new!(~D[2021-01-01], ~T[15:00:00.123456], "Europe/Moscow")
- naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(conn)) |> DateTime.to_naive()
+ naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(ctx.conn)) |> DateTime.to_naive()
- assert Ch.query!(conn, "select {$0:DateTime64(6)} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime64(6)} as d, toString(d)", [utc]).rows ==
[[~N[2021-01-01 12:00:00.123456], to_string(naive)]]
- assert Ch.query!(conn, "select {$0:DateTime64(6, 'UTC')} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime64(6, 'UTC')} as d, toString(d)", [utc]).rows ==
[[utc, "2021-01-01 12:00:00.123456"]]
- assert Ch.query!(conn, "select {$0:DateTime64(6,'Europe/Moscow')} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(
+ ctx,
+ "select {$0:DateTime64(6,'Europe/Moscow')} as d, toString(d)",
+ [utc]
+ ).rows ==
[[msk, "2021-01-01 15:00:00.123456"]]
end
- test "utc datetime64 zero microseconds query param encoding", %{conn: conn} do
+ test "utc datetime64 zero microseconds query param encoding", ctx do
# this test case guards against a previous bug where DateTimes with a microsecond value of 0 and precision > 0 would
# get encoded as a val like "1.6095024e9" which ClickHouse would be unable to parse to a DateTime.
utc = ~U[2021-01-01 12:00:00.000000Z]
- naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(conn)) |> DateTime.to_naive()
+ naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(ctx.conn)) |> DateTime.to_naive()
- assert Ch.query!(conn, "select {$0:DateTime64(6)} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime64(6)} as d, toString(d)", [utc]).rows ==
[[~N[2021-01-01 12:00:00.000000], to_string(naive)]]
end
- test "utc datetime64 microseconds with more precision than digits", %{conn: conn} do
+ test "utc datetime64 microseconds with more precision than digits", ctx do
# this test case guards against a previous bug where DateTimes with a microsecond value of with N digits
# and a precision > N would be encoded with a space like `234235234. 234123`
utc = ~U[2024-05-26 20:00:46.099856Z]
- naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(conn)) |> DateTime.to_naive()
+ naive = utc |> DateTime.shift_zone!(Ch.Test.clickhouse_tz(ctx.conn)) |> DateTime.to_naive()
- assert Ch.query!(conn, "select {$0:DateTime64(6)} as d, toString(d)", [utc]).rows ==
+ assert parameterize_query!(ctx, "select {$0:DateTime64(6)} as d, toString(d)", [utc]).rows ==
[[~N[2024-05-26 20:00:46.099856Z], to_string(naive)]]
end
- test "select with options", %{conn: conn} do
+ test "select with options", ctx do
assert {:ok, %{num_rows: 1, rows: [["async_insert", "Bool", "1"]]}} =
- Ch.query(conn, "show settings like 'async_insert'", [], settings: [async_insert: 1])
+ parameterize_query(ctx, "show settings like 'async_insert'", [],
+ settings: [async_insert: 1]
+ )
assert {:ok, %{num_rows: 1, rows: [["async_insert", "Bool", "0"]]}} =
- Ch.query(conn, "show settings like 'async_insert'", [], settings: [async_insert: 0])
+ parameterize_query(ctx, "show settings like 'async_insert'", [],
+ settings: [async_insert: 0]
+ )
end
- test "create", %{conn: conn} do
+ test "create", ctx do
assert {:ok, %{command: :create, num_rows: nil, rows: [], data: []}} =
- Ch.query(conn, "create table create_example(a UInt8) engine = Memory")
+ parameterize_query(ctx, "create table create_example(a UInt8) engine = Memory")
+
+ on_exit(fn -> Ch.Test.query("drop table create_example") end)
end
- test "create with options", %{conn: conn} do
+ test "create with options", ctx do
assert {:error, %Ch.Error{code: 164, message: message}} =
- Ch.query(conn, "create table create_example(a UInt8) engine = Memory", [],
+ parameterize_query(ctx, "create table create_example(a UInt8) engine = Memory", [],
settings: [readonly: 1]
)
@@ -241,49 +276,51 @@ defmodule Ch.ConnectionTest do
end
describe "insert" do
- setup %{conn: conn} do
+ setup ctx do
table = "insert_t_#{System.unique_integer([:positive])}"
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"create table #{table}(a UInt8 default 1, b String) engine = Memory"
)
{:ok, table: table}
end
- test "values", %{conn: conn, table: table} do
+ test "values", %{table: table} = ctx do
assert {:ok, %{num_rows: 3}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into {table:Identifier} values (1, 'a'),(2,'b'), (null, null)",
%{"table" => table}
)
assert {:ok, %{rows: rows}} =
- Ch.query(conn, "select * from {table:Identifier}", %{"table" => table})
+ parameterize_query(ctx, "select * from {table:Identifier}", %{"table" => table})
assert rows == [[1, "a"], [2, "b"], [1, ""]]
assert {:ok, %{num_rows: 2}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into {$0:Identifier}(a, b) values ({$1:UInt8},{$2:String}),({$3:UInt8},{$4:String})",
[table, 4, "d", 5, "e"]
)
assert {:ok, %{rows: rows}} =
- Ch.query(conn, "select * from {table:Identifier} where a >= 4", %{"table" => table})
+ parameterize_query(ctx, "select * from {table:Identifier} where a >= 4", %{
+ "table" => table
+ })
assert rows == [[4, "d"], [5, "e"]]
end
- test "when readonly", %{conn: conn, table: table} do
+ test "when readonly", %{table: table} = ctx do
settings = [readonly: 1]
assert {:error, %Ch.Error{code: 164, message: message}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into {table:Identifier} values (1, 'a'), (2, 'b')",
%{"table" => table},
settings: settings
@@ -292,34 +329,34 @@ defmodule Ch.ConnectionTest do
assert message =~ "Cannot execute query in readonly mode."
end
- test "automatic RowBinary", %{conn: conn, table: table} do
+ test "automatic RowBinary", %{table: table} = ctx do
stmt = "insert into #{table}(a, b) format RowBinary"
types = ["UInt8", "String"]
rows = [[1, "a"], [2, "b"]]
- assert %{num_rows: 2} = Ch.query!(conn, stmt, rows, types: types)
+ assert %{num_rows: 2} = parameterize_query!(ctx, stmt, rows, types: types)
assert %{rows: rows} =
- Ch.query!(conn, "select * from {table:Identifier}", %{"table" => table})
+ parameterize_query!(ctx, "select * from {table:Identifier}", %{"table" => table})
assert rows == [[1, "a"], [2, "b"]]
end
- test "manual RowBinary", %{conn: conn, table: table} do
+ test "manual RowBinary", %{table: table} = ctx do
stmt = "insert into #{table}(a, b) format RowBinary"
types = ["UInt8", "String"]
rows = [[1, "a"], [2, "b"]]
data = RowBinary.encode_rows(rows, types)
- assert %{num_rows: 2} = Ch.query!(conn, stmt, data, encode: false)
+ assert %{num_rows: 2} = parameterize_query!(ctx, stmt, data, encode: false)
assert %{rows: rows} =
- Ch.query!(conn, "select * from {table:Identifier}", %{"table" => table})
+ parameterize_query!(ctx, "select * from {table:Identifier}", %{"table" => table})
assert rows == [[1, "a"], [2, "b"]]
end
- test "chunked", %{conn: conn, table: table} do
+ test "chunked", %{table: table} = ctx do
types = ["UInt8", "String"]
rows = [[1, "a"], [2, "b"], [3, "c"]]
@@ -329,122 +366,139 @@ defmodule Ch.ConnectionTest do
|> Stream.map(fn chunk -> RowBinary.encode_rows(chunk, types) end)
assert {:ok, %{num_rows: 3}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into #{table}(a, b) format RowBinary",
stream,
encode: false
)
assert {:ok, %{rows: rows}} =
- Ch.query(conn, "select * from {table:Identifier}", %{"table" => table})
+ parameterize_query(ctx, "select * from {table:Identifier}", %{"table" => table})
assert rows == [[1, "a"], [2, "b"], [3, "c"]]
end
- test "select", %{conn: conn, table: table} do
+ test "select", %{table: table} = ctx do
assert {:ok, %{num_rows: 3}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into {table:Identifier} values (1, 'a'), (2, 'b'), (null, null)",
%{"table" => table}
)
assert {:ok, %{num_rows: 3}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into {table:Identifier}(a, b) select a, b from {table:Identifier}",
%{"table" => table}
)
assert {:ok, %{rows: rows}} =
- Ch.query(conn, "select * from {table:Identifier}", %{"table" => table})
+ parameterize_query(ctx, "select * from {table:Identifier}", %{"table" => table})
assert rows == [[1, "a"], [2, "b"], [1, ""], [1, "a"], [2, "b"], [1, ""]]
assert {:ok, %{num_rows: 2}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into {$0:Identifier}(a, b) select a, b from {$0:Identifier} where a > {$1:UInt8}",
[table, 1]
)
assert {:ok, %{rows: new_rows}} =
- Ch.query(conn, "select * from {table:Identifier}", %{"table" => table})
+ parameterize_query(ctx, "select * from {table:Identifier}", %{"table" => table})
assert new_rows -- rows == [[2, "b"], [2, "b"]]
end
end
- test "delete", %{conn: conn} do
- Ch.query!(
- conn,
+ test "delete", ctx do
+ parameterize_query!(
+ ctx,
"create table delete_t(a UInt8, b String) engine = MergeTree order by tuple()"
)
- assert {:ok, %{num_rows: 2}} = Ch.query(conn, "insert into delete_t values (1,'a'), (2,'b')")
+ on_exit(fn -> Ch.Test.query("drop table delete_t") end)
+
+ assert {:ok, %{num_rows: 2}} =
+ parameterize_query(ctx, "insert into delete_t values (1,'a'), (2,'b')")
settings = [allow_experimental_lightweight_delete: 1]
assert {:ok, %{rows: [], data: [], command: :delete}} =
- Ch.query(conn, "delete from delete_t where 1", [], settings: settings)
+ parameterize_query(ctx, "delete from delete_t where 1", [], settings: settings)
end
- test "query!", %{conn: conn} do
- assert %{num_rows: 1, rows: [[1]]} = Ch.query!(conn, "select 1")
+ test "query!", ctx do
+ assert %{num_rows: 1, rows: [[1]]} = parameterize_query!(ctx, "select 1")
end
describe "types" do
- test "multiple types", %{conn: conn} do
+ test "multiple types", ctx do
assert {:ok, %{num_rows: 1, rows: [[1, "a"]]}} =
- Ch.query(conn, "select {a:Int8}, {b:String}", %{"a" => 1, "b" => "a"})
+ parameterize_query(ctx, "select {a:Int8}, {b:String}", %{"a" => 1, "b" => "a"})
end
- test "ints", %{conn: conn} do
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:Int8}", %{"a" => 1})
+ test "ints", ctx do
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:Int8}", %{"a" => 1})
assert {:ok, %{num_rows: 1, rows: [[-1000]]}} =
- Ch.query(conn, "select {a:Int16}", %{"a" => -1000})
+ parameterize_query(ctx, "select {a:Int16}", %{"a" => -1000})
assert {:ok, %{num_rows: 1, rows: [[100_000]]}} =
- Ch.query(conn, "select {a:Int32}", %{"a" => 100_000})
+ parameterize_query(ctx, "select {a:Int32}", %{"a" => 100_000})
+
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:Int64}", %{"a" => 1})
+
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:Int128}", %{"a" => 1})
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:Int64}", %{"a" => 1})
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:Int128}", %{"a" => 1})
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:Int256}", %{"a" => 1})
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:Int256}", %{"a" => 1})
end
- test "uints", %{conn: conn} do
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:UInt8}", %{"a" => 1})
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:UInt16}", %{"a" => 1})
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:UInt32}", %{"a" => 1})
- assert {:ok, %{num_rows: 1, rows: [[1]]}} = Ch.query(conn, "select {a:UInt64}", %{"a" => 1})
+ test "uints", ctx do
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:UInt8}", %{"a" => 1})
+
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:UInt16}", %{"a" => 1})
+
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:UInt32}", %{"a" => 1})
+
+ assert {:ok, %{num_rows: 1, rows: [[1]]}} =
+ parameterize_query(ctx, "select {a:UInt64}", %{"a" => 1})
assert {:ok, %{num_rows: 1, rows: [[1]]}} =
- Ch.query(conn, "select {a:UInt128}", %{"a" => 1})
+ parameterize_query(ctx, "select {a:UInt128}", %{"a" => 1})
assert {:ok, %{num_rows: 1, rows: [[1]]}} =
- Ch.query(conn, "select {a:UInt256}", %{"a" => 1})
+ parameterize_query(ctx, "select {a:UInt256}", %{"a" => 1})
end
- test "fixed string", %{conn: conn} do
+ test "fixed string", ctx do
assert {:ok, %{num_rows: 1, rows: [[<<0, 0>>]]}} =
- Ch.query(conn, "select {a:FixedString(2)}", %{"a" => ""})
+ parameterize_query(ctx, "select {a:FixedString(2)}", %{"a" => ""})
assert {:ok, %{num_rows: 1, rows: [["a" <> <<0>>]]}} =
- Ch.query(conn, "select {a:FixedString(2)}", %{"a" => "a"})
+ parameterize_query(ctx, "select {a:FixedString(2)}", %{"a" => "a"})
assert {:ok, %{num_rows: 1, rows: [["aa"]]}} =
- Ch.query(conn, "select {a:FixedString(2)}", %{"a" => "aa"})
+ parameterize_query(ctx, "select {a:FixedString(2)}", %{"a" => "aa"})
assert {:ok, %{num_rows: 1, rows: [["aaaaa"]]}} =
- Ch.query(conn, "select {a:FixedString(5)}", %{"a" => "aaaaa"})
+ parameterize_query(ctx, "select {a:FixedString(5)}", %{"a" => "aaaaa"})
- Ch.query!(conn, "create table fixed_string_t(a FixedString(3)) engine = Memory")
+ parameterize_query!(ctx, "create table fixed_string_t(a FixedString(3)) engine = Memory")
+ on_exit(fn -> Ch.Test.query("drop table fixed_string_t") end)
assert {:ok, %{num_rows: 4}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into fixed_string_t(a) format RowBinary",
[
[""],
@@ -455,7 +509,7 @@ defmodule Ch.ConnectionTest do
types: ["FixedString(3)"]
)
- assert Ch.query!(conn, "select * from fixed_string_t").rows == [
+ assert parameterize_query!(ctx, "select * from fixed_string_t").rows == [
[<<0, 0, 0>>],
["a" <> <<0, 0>>],
["aa" <> <<0>>],
@@ -463,32 +517,33 @@ defmodule Ch.ConnectionTest do
]
end
- test "decimal", %{conn: conn} do
+ test "decimal", ctx do
assert {:ok, %{num_rows: 1, rows: [row]}} =
- Ch.query(conn, "SELECT toDecimal32(2, 4) AS x, x / 3, toTypeName(x)")
+ parameterize_query(ctx, "SELECT toDecimal32(2, 4) AS x, x / 3, toTypeName(x)")
assert row == [Decimal.new("2.0000"), Decimal.new("0.6666"), "Decimal(9, 4)"]
assert {:ok, %{num_rows: 1, rows: [row]}} =
- Ch.query(conn, "SELECT toDecimal64(2, 4) AS x, x / 3, toTypeName(x)")
+ parameterize_query(ctx, "SELECT toDecimal64(2, 4) AS x, x / 3, toTypeName(x)")
assert row == [Decimal.new("2.0000"), Decimal.new("0.6666"), "Decimal(18, 4)"]
assert {:ok, %{num_rows: 1, rows: [row]}} =
- Ch.query(conn, "SELECT toDecimal128(2, 4) AS x, x / 3, toTypeName(x)")
+ parameterize_query(ctx, "SELECT toDecimal128(2, 4) AS x, x / 3, toTypeName(x)")
assert row == [Decimal.new("2.0000"), Decimal.new("0.6666"), "Decimal(38, 4)"]
assert {:ok, %{num_rows: 1, rows: [row]}} =
- Ch.query(conn, "SELECT toDecimal256(2, 4) AS x, x / 3, toTypeName(x)")
+ parameterize_query(ctx, "SELECT toDecimal256(2, 4) AS x, x / 3, toTypeName(x)")
assert row == [Decimal.new("2.0000"), Decimal.new("0.6666"), "Decimal(76, 4)"]
- Ch.query!(conn, "create table decimal_t(d Decimal32(4)) engine = Memory")
+ parameterize_query!(ctx, "create table decimal_t(d Decimal32(4)) engine = Memory")
+ on_exit(fn -> Ch.Test.query("drop table decimal_t") end)
assert %{num_rows: 3} =
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"insert into decimal_t(d) format RowBinary",
_rows = [
[Decimal.new("2.66")],
@@ -498,35 +553,37 @@ defmodule Ch.ConnectionTest do
types: ["Decimal32(4)"]
)
- assert Ch.query!(conn, "select * from decimal_t").rows == [
+ assert parameterize_query!(ctx, "select * from decimal_t").rows == [
[Decimal.new("2.6600")],
[Decimal.new("2.6666")],
[Decimal.new("2.6667")]
]
end
- test "boolean", %{conn: conn} do
+ test "boolean", ctx do
assert {:ok, %{num_rows: 1, rows: [[true, "Bool"]]}} =
- Ch.query(conn, "select true as col, toTypeName(col)")
+ parameterize_query(ctx, "select true as col, toTypeName(col)")
assert {:ok, %{num_rows: 1, rows: [[1, "UInt8"]]}} =
- Ch.query(conn, "select true == 1 as col, toTypeName(col)")
+ parameterize_query(ctx, "select true == 1 as col, toTypeName(col)")
- assert {:ok, %{num_rows: 1, rows: [[true, false]]}} = Ch.query(conn, "select true, false")
+ assert {:ok, %{num_rows: 1, rows: [[true, false]]}} =
+ parameterize_query(ctx, "select true, false")
- Ch.query!(conn, "create table test_bool(A Int64, B Bool) engine = Memory")
+ parameterize_query!(ctx, "create table test_bool(A Int64, B Bool) engine = Memory")
+ on_exit(fn -> Ch.Test.query("drop table test_bool") end)
- Ch.query!(conn, "INSERT INTO test_bool VALUES (1, true),(2,0)")
+ parameterize_query!(ctx, "INSERT INTO test_bool VALUES (1, true),(2,0)")
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"insert into test_bool(A, B) format RowBinary",
_rows = [[3, true], [4, false]],
types: ["Int64", "Bool"]
)
# anything > 0 is `true`, here `2` is `true`
- Ch.query!(conn, "insert into test_bool(A, B) values (5, 2)")
+ parameterize_query!(ctx, "insert into test_bool(A, B) values (5, 2)")
assert %{
rows: [
@@ -536,15 +593,15 @@ defmodule Ch.ConnectionTest do
[4, false, 0],
[5, true, 5]
]
- } = Ch.query!(conn, "SELECT *, A * B FROM test_bool ORDER BY A")
+ } = parameterize_query!(ctx, "SELECT *, A * B FROM test_bool ORDER BY A")
end
- test "uuid", %{conn: conn} do
+ test "uuid", ctx do
assert {:ok, %{num_rows: 1, rows: [[<<_::16-bytes>>]]}} =
- Ch.query(conn, "select generateUUIDv4()")
+ parameterize_query(ctx, "select generateUUIDv4()")
assert {:ok, %{num_rows: 1, rows: [[uuid, "417ddc5d-e556-4d27-95dd-a34d84e46a50"]]}} =
- Ch.query(conn, "select {uuid:UUID} as u, toString(u)", %{
+ parameterize_query(ctx, "select {uuid:UUID} as u, toString(u)", %{
"uuid" => "417ddc5d-e556-4d27-95dd-a34d84e46a50"
})
@@ -553,16 +610,18 @@ defmodule Ch.ConnectionTest do
|> String.replace("-", "")
|> Base.decode16!(case: :lower)
- Ch.query!(conn, " CREATE TABLE t_uuid (x UUID, y String) ENGINE Memory")
- Ch.query!(conn, "INSERT INTO t_uuid SELECT generateUUIDv4(), 'Example 1'")
+ parameterize_query!(ctx, " CREATE TABLE t_uuid (x UUID, y String) ENGINE Memory")
+ on_exit(fn -> Ch.Test.query("drop table t_uuid") end)
+
+ parameterize_query!(ctx, "INSERT INTO t_uuid SELECT generateUUIDv4(), 'Example 1'")
assert {:ok, %{num_rows: 1, rows: [[<<_::16-bytes>>, "Example 1"]]}} =
- Ch.query(conn, "SELECT * FROM t_uuid")
+ parameterize_query(ctx, "SELECT * FROM t_uuid")
- Ch.query!(conn, "INSERT INTO t_uuid (y) VALUES ('Example 2')")
+ parameterize_query!(ctx, "INSERT INTO t_uuid (y) VALUES ('Example 2')")
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"insert into t_uuid(x,y) format RowBinary",
_rows = [[uuid, "Example 3"]],
types: ["UUID", "String"]
@@ -576,27 +635,32 @@ defmodule Ch.ConnectionTest do
[<<0::128>>, "Example 2"],
[^uuid, "Example 3"]
]
- }} = Ch.query(conn, "SELECT * FROM t_uuid ORDER BY y")
+ }} = parameterize_query(ctx, "SELECT * FROM t_uuid ORDER BY y")
end
@tag :skip
- test "json", %{conn: conn} do
+ test "json", ctx do
settings = [allow_experimental_object_type: 1]
- Ch.query!(conn, "CREATE TABLE json(o JSON) ENGINE = Memory", [], settings: settings)
+ parameterize_query!(ctx, "CREATE TABLE json(o JSON) ENGINE = Memory", [],
+ settings: settings
+ )
- Ch.query!(conn, ~s|INSERT INTO json VALUES ('{"a": 1, "b": { "c": 2, "d": [1, 2, 3] }}')|)
+ parameterize_query!(
+ ctx,
+ ~s|INSERT INTO json VALUES ('{"a": 1, "b": { "c": 2, "d": [1, 2, 3] }}')|
+ )
- assert Ch.query!(conn, "SELECT o.a, o.b.c, o.b.d[3] FROM json").rows == [[1, 2, 3]]
+ assert parameterize_query!(ctx, "SELECT o.a, o.b.c, o.b.d[3] FROM json").rows == [[1, 2, 3]]
# named tuples are not supported yet
- assert_raise ArgumentError, fn -> Ch.query!(conn, "SELECT o FROM json") end
+ assert_raise ArgumentError, fn -> parameterize_query!(ctx, "SELECT o FROM json") end
end
@tag :json
- test "json as string", %{conn: conn} do
+ test "json as string", ctx do
# after v25 ClickHouse started rendering numbers in JSON as strings
- [[version]] = Ch.query!(conn, "select version()").rows
+ [[version]] = parameterize_query!(ctx, "select version()").rows
parse_version = fn version ->
version |> String.split(".") |> Enum.map(&String.to_integer/1)
@@ -618,16 +682,18 @@ defmodule Ch.ConnectionTest do
]
end
- assert Ch.query!(conn, ~s|select '{"answer":42}'::JSON::String|, [],
+ assert parameterize_query!(ctx, ~s|select '{"answer":42}'::JSON::String|, [],
settings: [enable_json_type: 1]
).rows == expected1
- Ch.query!(conn, "CREATE TABLE test_json_as_string(json JSON) ENGINE = Memory", [],
+ parameterize_query!(ctx, "CREATE TABLE test_json_as_string(json JSON) ENGINE = Memory", [],
settings: [enable_json_type: 1]
)
- Ch.query!(
- conn,
+ on_exit(fn -> Ch.Test.query("DROP TABLE test_json_as_string") end)
+
+ parameterize_query!(
+ ctx,
"INSERT INTO test_json_as_string(json) FORMAT RowBinary",
_rows = [[Jason.encode_to_iodata!(%{"a" => 42})], [Jason.encode_to_iodata!(%{"b" => 10})]],
types: [:string],
@@ -637,84 +703,99 @@ defmodule Ch.ConnectionTest do
]
)
- assert Ch.query!(conn, "select json::String from test_json_as_string", [],
+ assert parameterize_query!(ctx, "select json::String from test_json_as_string", [],
settings: [enable_json_type: 1]
).rows == expected2
end
# TODO enum16
- test "enum8", %{conn: conn} do
+ test "enum8", ctx do
assert {:ok, %{num_rows: 1, rows: [["Enum8('a' = 1, 'b' = 2)"]]}} =
- Ch.query(conn, "SELECT toTypeName(CAST('a', 'Enum(\\'a\\' = 1, \\'b\\' = 2)'))")
+ parameterize_query(
+ ctx,
+ "SELECT toTypeName(CAST('a', 'Enum(\\'a\\' = 1, \\'b\\' = 2)'))"
+ )
assert {:ok, %{num_rows: 1, rows: [["a"]]}} =
- Ch.query(conn, "SELECT CAST('a', 'Enum(\\'a\\' = 1, \\'b\\' = 2)')")
+ parameterize_query(ctx, "SELECT CAST('a', 'Enum(\\'a\\' = 1, \\'b\\' = 2)')")
assert {:ok, %{num_rows: 1, rows: [["b"]]}} =
- Ch.query(conn, "select {enum:Enum('a' = 1, 'b' = 2)}", %{"enum" => "b"})
+ parameterize_query(ctx, "select {enum:Enum('a' = 1, 'b' = 2)}", %{"enum" => "b"})
assert {:ok, %{num_rows: 1, rows: [["b"]]}} =
- Ch.query(conn, "select {enum:Enum('a' = 1, 'b' = 2)}", %{"enum" => 2})
+ parameterize_query(ctx, "select {enum:Enum('a' = 1, 'b' = 2)}", %{"enum" => 2})
assert {:ok, %{num_rows: 1, rows: [["b"]]}} =
- Ch.query(conn, "select {enum:Enum16('a' = 1, 'b' = 2)}", %{"enum" => 2})
+ parameterize_query(ctx, "select {enum:Enum16('a' = 1, 'b' = 2)}", %{"enum" => 2})
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"CREATE TABLE t_enum(i UInt8, x Enum('hello' = 1, 'world' = 2)) ENGINE Memory"
)
- Ch.query!(conn, "INSERT INTO t_enum VALUES (0, 'hello'), (1, 'world'), (2, 'hello')")
+ on_exit(fn -> Ch.Test.query("DROP TABLE t_enum") end)
- assert Ch.query!(conn, "SELECT *, CAST(x, 'Int8') FROM t_enum ORDER BY i").rows == [
- [0, "hello", 1],
- [1, "world", 2],
- [2, "hello", 1]
- ]
+ parameterize_query!(
+ ctx,
+ "INSERT INTO t_enum VALUES (0, 'hello'), (1, 'world'), (2, 'hello')"
+ )
+
+ assert parameterize_query!(ctx, "SELECT *, CAST(x, 'Int8') FROM t_enum ORDER BY i").rows ==
+ [
+ [0, "hello", 1],
+ [1, "world", 2],
+ [2, "hello", 1]
+ ]
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO t_enum(i, x) FORMAT RowBinary",
_rows = [[3, "hello"], [4, "world"], [5, 1], [6, 2]],
types: ["UInt8", "Enum8('hello' = 1, 'world' = 2)"]
)
- assert Ch.query!(conn, "SELECT *, CAST(x, 'Int8') FROM t_enum ORDER BY i").rows == [
- [0, "hello", 1],
- [1, "world", 2],
- [2, "hello", 1],
- [3, "hello", 1],
- [4, "world", 2],
- [5, "hello", 1],
- [6, "world", 2]
- ]
+ assert parameterize_query!(ctx, "SELECT *, CAST(x, 'Int8') FROM t_enum ORDER BY i").rows ==
+ [
+ [0, "hello", 1],
+ [1, "world", 2],
+ [2, "hello", 1],
+ [3, "hello", 1],
+ [4, "world", 2],
+ [5, "hello", 1],
+ [6, "world", 2]
+ ]
# TODO nil enum
end
- test "map", %{conn: conn} do
- assert Ch.query!(
- conn,
+ test "map", ctx do
+ assert parameterize_query!(
+ ctx,
"SELECT CAST(([1, 2, 3], ['Ready', 'Steady', 'Go']), 'Map(UInt8, String)') AS map"
).rows == [[%{1 => "Ready", 2 => "Steady", 3 => "Go"}]]
- assert Ch.query!(conn, "select {map:Map(String, UInt8)}", %{
+ assert parameterize_query!(ctx, "select {map:Map(String, UInt8)}", %{
"map" => %{"pg" => 13, "hello" => 100}
}).rows == [[%{"hello" => 100, "pg" => 13}]]
- Ch.query!(conn, "CREATE TABLE table_map (a Map(String, UInt64)) ENGINE=Memory")
+ parameterize_query!(ctx, "CREATE TABLE table_map (a Map(String, UInt64)) ENGINE=Memory")
+ on_exit(fn -> Ch.Test.query("DROP TABLE table_map") end)
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO table_map VALUES ({'key1':1, 'key2':10}), ({'key1':2,'key2':20}), ({'key1':3,'key2':30})"
)
- assert Ch.query!(conn, "SELECT a['key2'] FROM table_map").rows == [[10], [20], [30]]
+ assert parameterize_query!(ctx, "SELECT a['key2'] FROM table_map").rows == [
+ [10],
+ [20],
+ [30]
+ ]
- assert Ch.query!(conn, "INSERT INTO table_map VALUES ({'key3':100}), ({})")
+ assert parameterize_query!(ctx, "INSERT INTO table_map VALUES ({'key3':100}), ({})")
- assert Ch.query!(conn, "SELECT a['key3'] FROM table_map ORDER BY 1 DESC").rows == [
+ assert parameterize_query!(ctx, "SELECT a['key3'] FROM table_map ORDER BY 1 DESC").rows == [
[100],
[0],
[0],
@@ -722,8 +803,8 @@ defmodule Ch.ConnectionTest do
[0]
]
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"INSERT INTO table_map FORMAT RowBinary",
_rows = [
[%{"key10" => 20, "key20" => 40}],
@@ -738,7 +819,7 @@ defmodule Ch.ConnectionTest do
types: ["Map(String, UInt64)"]
)
- assert Ch.query!(conn, "SELECT * FROM table_map ORDER BY a ASC").rows == [
+ assert parameterize_query!(ctx, "SELECT * FROM table_map ORDER BY a ASC").rows == [
[%{}],
[%{}],
[%{}],
@@ -752,36 +833,37 @@ defmodule Ch.ConnectionTest do
]
end
- test "tuple", %{conn: conn} do
- assert Ch.query!(conn, "SELECT tuple(1,'a') AS x, toTypeName(x)").rows == [
+ test "tuple", ctx do
+ assert parameterize_query!(ctx, "SELECT tuple(1,'a') AS x, toTypeName(x)").rows == [
[{1, "a"}, "Tuple(UInt8, String)"]
]
- assert Ch.query!(conn, "SELECT {$0:Tuple(Int8, String)}", [{-1, "abs"}]).rows == [
+ assert parameterize_query!(ctx, "SELECT {$0:Tuple(Int8, String)}", [{-1, "abs"}]).rows == [
[{-1, "abs"}]
]
- assert Ch.query!(conn, "SELECT tuple('a') AS x").rows == [[{"a"}]]
+ assert parameterize_query!(ctx, "SELECT tuple('a') AS x").rows == [[{"a"}]]
- assert Ch.query!(conn, "SELECT tuple(1, NULL) AS x, toTypeName(x)").rows == [
+ assert parameterize_query!(ctx, "SELECT tuple(1, NULL) AS x, toTypeName(x)").rows == [
[{1, nil}, "Tuple(UInt8, Nullable(Nothing))"]
]
# TODO named tuples
- Ch.query!(conn, "CREATE TABLE tuples_t (`a` Tuple(String, Int64)) ENGINE = Memory")
+ parameterize_query!(ctx, "CREATE TABLE tuples_t (`a` Tuple(String, Int64)) ENGINE = Memory")
+ on_exit(fn -> Ch.Test.query("DROP TABLE tuples_t") end)
assert %{num_rows: 2} =
- Ch.query!(conn, "INSERT INTO tuples_t VALUES (('y', 10)), (('x',-10))")
+ parameterize_query!(ctx, "INSERT INTO tuples_t VALUES (('y', 10)), (('x',-10))")
assert %{num_rows: 2} =
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO tuples_t FORMAT RowBinary",
_rows = [[{"a", 20}], [{"b", 30}]],
types: ["Tuple(String, Int64)"]
)
- assert Ch.query!(conn, "SELECT a FROM tuples_t ORDER BY a.1 ASC").rows == [
+ assert parameterize_query!(ctx, "SELECT a FROM tuples_t ORDER BY a.1 ASC").rows == [
[{"a", 20}],
[{"b", 30}],
[{"x", -10}],
@@ -789,16 +871,21 @@ defmodule Ch.ConnectionTest do
]
end
- test "datetime", %{conn: conn} do
- Ch.query!(
- conn,
+ test "datetime", ctx do
+ parameterize_query!(
+ ctx,
"CREATE TABLE dt(`timestamp` DateTime('Asia/Istanbul'), `event_id` UInt8) ENGINE = Memory"
)
- Ch.query!(conn, "INSERT INTO dt Values (1546300800, 1), ('2019-01-01 00:00:00', 2)")
+ on_exit(fn -> Ch.Test.query("DROP TABLE dt") end)
+
+ parameterize_query!(
+ ctx,
+ "INSERT INTO dt Values (1546300800, 1), ('2019-01-01 00:00:00', 2)"
+ )
assert {:ok, %{num_rows: 2, rows: rows}} =
- Ch.query(conn, "SELECT *, toString(timestamp) FROM dt")
+ parameterize_query(ctx, "SELECT *, toString(timestamp) FROM dt")
assert rows == [
[
@@ -820,7 +907,7 @@ defmodule Ch.ConnectionTest do
# https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
assert {:ok,
%{num_rows: 1, rows: [[naive_datetime, "2022-12-12 12:00:00"]], headers: headers}} =
- Ch.query(conn, "select {$0:DateTime} as d, toString(d)", [naive_noon])
+ parameterize_query(ctx, "select {$0:DateTime} as d, toString(d)", [naive_noon])
# to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
# i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
@@ -833,10 +920,12 @@ defmodule Ch.ConnectionTest do
|> DateTime.to_naive()
assert {:ok, %{num_rows: 1, rows: [[~U[2022-12-12 12:00:00Z], "2022-12-12 12:00:00"]]}} =
- Ch.query(conn, "select {$0:DateTime('UTC')} as d, toString(d)", [naive_noon])
+ parameterize_query(ctx, "select {$0:DateTime('UTC')} as d, toString(d)", [
+ naive_noon
+ ])
assert {:ok, %{num_rows: 1, rows: rows}} =
- Ch.query(conn, "select {$0:DateTime('Asia/Bangkok')} as d, toString(d)", [
+ parameterize_query(ctx, "select {$0:DateTime('Asia/Bangkok')} as d, toString(d)", [
naive_noon
])
@@ -853,20 +942,26 @@ defmodule Ch.ConnectionTest do
on_exit(fn -> Calendar.put_time_zone_database(prev_tz_db) end)
assert_raise ArgumentError, ~r/:utc_only_time_zone_database/, fn ->
- Ch.query(conn, "select {$0:DateTime('Asia/Tokyo')}", [naive_noon])
+ parameterize_query(ctx, "select {$0:DateTime('Asia/Tokyo')}", [naive_noon])
end
end
# TODO are negatives correct? what's the range?
- test "date32", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE new(`timestamp` Date32, `event_id` UInt8) ENGINE = Memory;")
- Ch.query!(conn, "INSERT INTO new VALUES (4102444800, 1), ('2100-01-01', 2)")
+ test "date32", ctx do
+ parameterize_query!(
+ ctx,
+ "CREATE TABLE new(`timestamp` Date32, `event_id` UInt8) ENGINE = Memory;"
+ )
+
+ on_exit(fn -> Ch.Test.query("DROP TABLE new") end)
+
+ parameterize_query!(ctx, "INSERT INTO new VALUES (4102444800, 1), ('2100-01-01', 2)")
assert {:ok,
%{
num_rows: 2,
rows: [first_event, [~D[2100-01-01], 2, "2100-01-01"]]
- }} = Ch.query(conn, "SELECT *, toString(timestamp) FROM new")
+ }} = parameterize_query(ctx, "SELECT *, toString(timestamp) FROM new")
# TODO use timezone info to be more exact
assert first_event in [
@@ -875,18 +970,18 @@ defmodule Ch.ConnectionTest do
]
assert {:ok, %{num_rows: 1, rows: [[~D[1900-01-01], "1900-01-01"]]}} =
- Ch.query(conn, "select {$0:Date32} as d, toString(d)", [~D[1900-01-01]])
+ parameterize_query(ctx, "select {$0:Date32} as d, toString(d)", [~D[1900-01-01]])
# max
assert {:ok, %{num_rows: 1, rows: [[~D[2299-12-31], "2299-12-31"]]}} =
- Ch.query(conn, "select {$0:Date32} as d, toString(d)", [~D[2299-12-31]])
+ parameterize_query(ctx, "select {$0:Date32} as d, toString(d)", [~D[2299-12-31]])
# min
assert {:ok, %{num_rows: 1, rows: [[~D[1900-01-01], "1900-01-01"]]}} =
- Ch.query(conn, "select {$0:Date32} as d, toString(d)", [~D[1900-01-01]])
+ parameterize_query(ctx, "select {$0:Date32} as d, toString(d)", [~D[1900-01-01]])
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"insert into new(timestamp, event_id) format RowBinary",
_rows = [[~D[1960-01-01], 3]],
types: ["Date32", "UInt8"]
@@ -899,7 +994,11 @@ defmodule Ch.ConnectionTest do
[~D[2100-01-01], 2, "2100-01-01"],
[~D[1960-01-01], 3, "1960-01-01"]
]
- } = Ch.query!(conn, "SELECT *, toString(timestamp) FROM new ORDER BY event_id")
+ } =
+ parameterize_query!(
+ ctx,
+ "SELECT *, toString(timestamp) FROM new ORDER BY event_id"
+ )
# TODO use timezone info to be more exact
assert first_event in [
@@ -908,19 +1007,26 @@ defmodule Ch.ConnectionTest do
]
assert %{num_rows: 1, rows: [[3]]} =
- Ch.query!(conn, "SELECT event_id FROM new WHERE timestamp = '1960-01-01'")
+ parameterize_query!(ctx, "SELECT event_id FROM new WHERE timestamp = '1960-01-01'")
end
# https://clickhouse.com/docs/sql-reference/data-types/time
@tag :time
- test "time", %{conn: conn} do
+ test "time", ctx do
settings = [enable_time_time64_type: 1]
- Ch.query!(conn, "CREATE TABLE time_t(`time` Time, `event_id` UInt8) ENGINE = Memory", [],
+ parameterize_query!(
+ ctx,
+ "CREATE TABLE time_t(`time` Time, `event_id` UInt8) ENGINE = Memory",
+ [],
settings: settings
)
- Ch.query!(conn, "INSERT INTO time_t VALUES ('100:00:00', 1), (12453, 2)", [],
+ on_exit(fn ->
+ Ch.Test.query("DROP TABLE time_t", [], settings: settings)
+ end)
+
+ parameterize_query!(ctx, "INSERT INTO time_t VALUES ('100:00:00', 1), (12453, 2)", [],
settings: settings
)
@@ -930,10 +1036,12 @@ defmodule Ch.ConnectionTest do
assert_raise ArgumentError,
"ClickHouse Time value 3.6e5 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
- fn -> Ch.query!(conn, "select * from time_t", [], settings: settings) end
+ fn ->
+ parameterize_query!(ctx, "select * from time_t", [], settings: settings)
+ end
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO time_t(time, event_id) FORMAT RowBinary",
_rows = [
[~T[00:00:00], 3],
@@ -944,7 +1052,10 @@ defmodule Ch.ConnectionTest do
types: ["Time", "UInt8"]
)
- assert Ch.query!(conn, "select * from time_t where event_id > 1 order by event_id", [],
+ assert parameterize_query!(
+ ctx,
+ "select * from time_t where event_id > 1 order by event_id",
+ [],
settings: settings
).rows ==
[[~T[03:27:33], 2], [~T[00:00:00], 3], [~T[12:34:56], 4], [~T[23:59:59], 5]]
@@ -952,18 +1063,22 @@ defmodule Ch.ConnectionTest do
# https://clickhouse.com/docs/sql-reference/data-types/time64
@tag :time
- test "Time64(3)", %{conn: conn} do
+ test "Time64(3)", ctx do
settings = [enable_time_time64_type: 1]
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"CREATE TABLE time64_3_t(`time` Time64(3), `event_id` UInt8) ENGINE = Memory",
[],
settings: settings
)
- Ch.query!(
- conn,
+ on_exit(fn ->
+ Ch.Test.query("DROP TABLE time64_3_t", [], settings: settings)
+ end)
+
+ parameterize_query!(
+ ctx,
"INSERT INTO time64_3_t VALUES (15463123, 1), (154600.123, 2), ('100:00:00', 3);",
[],
settings: settings
@@ -975,10 +1090,12 @@ defmodule Ch.ConnectionTest do
assert_raise ArgumentError,
"ClickHouse Time value 154600.123 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
- fn -> Ch.query!(conn, "select * from time64_3_t", [], settings: settings) end
+ fn ->
+ parameterize_query!(ctx, "select * from time64_3_t", [], settings: settings)
+ end
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO time64_3_t(time, event_id) FORMAT RowBinary",
_rows = [
[~T[00:00:00.000000], 4],
@@ -991,8 +1108,8 @@ defmodule Ch.ConnectionTest do
types: ["Time64(3)", "UInt8"]
)
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"select * from time64_3_t where time < {max_elixir_time:Time64(6)} order by event_id",
%{"max_elixir_time" => ~T[23:59:59.999999]},
settings: settings
@@ -1008,18 +1125,22 @@ defmodule Ch.ConnectionTest do
end
@tag :time
- test "Time64(6)", %{conn: conn} do
+ test "Time64(6)", ctx do
settings = [enable_time_time64_type: 1]
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"CREATE TABLE time64_6_t(`time` Time64(6), `event_id` UInt8) ENGINE = Memory",
[],
settings: settings
)
- Ch.query!(
- conn,
+ on_exit(fn ->
+ Ch.Test.query("DROP TABLE time64_6_t", [], settings: settings)
+ end)
+
+ parameterize_query!(
+ ctx,
"INSERT INTO time64_6_t(time, event_id) FORMAT RowBinary",
_rows = [
[~T[00:00:00.000000], 1],
@@ -1032,8 +1153,8 @@ defmodule Ch.ConnectionTest do
types: ["Time64(6)", "UInt8"]
)
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"select * from time64_6_t order by event_id",
[],
settings: settings
@@ -1048,18 +1169,22 @@ defmodule Ch.ConnectionTest do
end
@tag :time
- test "Time64(9)", %{conn: conn} do
+ test "Time64(9)", ctx do
settings = [enable_time_time64_type: 1]
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"CREATE TABLE time64_9_t(`time` Time64(9), `event_id` UInt8) ENGINE = Memory",
[],
settings: settings
)
- Ch.query!(
- conn,
+ on_exit(fn ->
+ Ch.Test.query("DROP TABLE time64_9_t", [], settings: settings)
+ end)
+
+ parameterize_query!(
+ ctx,
"INSERT INTO time64_9_t(time, event_id) FORMAT RowBinary",
_rows = [
[~T[00:00:00.000000], 1],
@@ -1072,8 +1197,8 @@ defmodule Ch.ConnectionTest do
types: ["Time64(9)", "UInt8"]
)
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"select * from time64_9_t order by event_id",
[],
settings: settings
@@ -1087,19 +1212,21 @@ defmodule Ch.ConnectionTest do
]
end
- test "datetime64", %{conn: conn} do
- Ch.query!(
- conn,
+ test "datetime64", ctx do
+ parameterize_query!(
+ ctx,
"CREATE TABLE datetime64_t(`timestamp` DateTime64(3, 'Asia/Istanbul'), `event_id` UInt8) ENGINE = Memory"
)
- Ch.query!(
- conn,
+ on_exit(fn -> Ch.Test.query("DROP TABLE datetime64_t") end)
+
+ parameterize_query!(
+ ctx,
"INSERT INTO datetime64_t Values (1546300800123, 1), (1546300800.123, 2), ('2019-01-01 00:00:00', 3)"
)
assert {:ok, %{num_rows: 3, rows: rows}} =
- Ch.query(conn, "SELECT *, toString(timestamp) FROM datetime64_t")
+ parameterize_query(ctx, "SELECT *, toString(timestamp) FROM datetime64_t")
assert rows == [
[
@@ -1119,8 +1246,8 @@ defmodule Ch.ConnectionTest do
]
]
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"insert into datetime64_t(event_id, timestamp) format RowBinary",
_rows = [
[4, ~N[2021-01-01 12:00:00.123456]],
@@ -1130,8 +1257,8 @@ defmodule Ch.ConnectionTest do
)
assert {:ok, %{num_rows: 2, rows: rows}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"SELECT *, toString(timestamp) FROM datetime64_t WHERE timestamp > '2020-01-01'"
)
@@ -1155,7 +1282,7 @@ defmodule Ch.ConnectionTest do
# see https://clickhouse.com/docs/en/sql-reference/data-types/datetime
# https://kb.altinity.com/altinity-kb-queries-and-syntax/time-zones/
assert {:ok, %{num_rows: 1, rows: [[naive_datetime]], headers: headers}} =
- Ch.query(conn, "select {$0:DateTime64(#{precision})}", [naive_noon])
+ parameterize_query(ctx, "select {$0:DateTime64(#{precision})}", [naive_noon])
# to make this test pass for contributors with non UTC timezone we perform the same steps as ClickHouse
# i.e. we give server timezone to the naive datetime and shift it to UTC before comparing with the result
@@ -1172,20 +1299,24 @@ defmodule Ch.ConnectionTest do
assert {:ok,
%{num_rows: 1, rows: [[~U[2022-01-01 12:00:00.123Z], "2022-01-01 12:00:00.123"]]}} =
- Ch.query(conn, "select {dt:DateTime64(3,'UTC')} as d, toString(d)", %{
+ parameterize_query(ctx, "select {dt:DateTime64(3,'UTC')} as d, toString(d)", %{
"dt" => ~N[2022-01-01 12:00:00.123]
})
assert {:ok,
%{num_rows: 1, rows: [[~U[1900-01-01 12:00:00.123Z], "1900-01-01 12:00:00.123"]]}} =
- Ch.query(conn, "select {dt:DateTime64(3,'UTC')} as d, toString(d)", %{
+ parameterize_query(ctx, "select {dt:DateTime64(3,'UTC')} as d, toString(d)", %{
"dt" => ~N[1900-01-01 12:00:00.123]
})
assert {:ok, %{num_rows: 1, rows: [row]}} =
- Ch.query(conn, "select {dt:DateTime64(3,'Asia/Bangkok')} as d, toString(d)", %{
- "dt" => ~N[2022-01-01 12:00:00.123]
- })
+ parameterize_query(
+ ctx,
+ "select {dt:DateTime64(3,'Asia/Bangkok')} as d, toString(d)",
+ %{
+ "dt" => ~N[2022-01-01 12:00:00.123]
+ }
+ )
assert row == [
DateTime.new!(~D[2022-01-01], ~T[12:00:00.123], "Asia/Bangkok"),
@@ -1193,37 +1324,39 @@ defmodule Ch.ConnectionTest do
]
end
- test "nullable", %{conn: conn} do
- Ch.query!(
- conn,
+ test "nullable", ctx do
+ parameterize_query!(
+ ctx,
"CREATE TABLE nullable (`n` Nullable(UInt32)) ENGINE = MergeTree ORDER BY tuple()"
)
- Ch.query!(conn, "INSERT INTO nullable VALUES (1) (NULL) (2) (NULL)")
+ on_exit(fn -> Ch.Test.query("DROP TABLE nullable") end)
+
+ parameterize_query!(ctx, "INSERT INTO nullable VALUES (1) (NULL) (2) (NULL)")
assert {:ok, %{num_rows: 4, rows: [[0], [1], [0], [1]]}} =
- Ch.query(conn, "SELECT n.null FROM nullable")
+ parameterize_query(ctx, "SELECT n.null FROM nullable")
assert {:ok, %{num_rows: 4, rows: [[1], [nil], [2], [nil]]}} =
- Ch.query(conn, "SELECT n FROM nullable")
+ parameterize_query(ctx, "SELECT n FROM nullable")
# weird thing about nullables is that, similar to bool, in binary format, any byte larger than 0 is `null`
assert {:ok, %{num_rows: 5}} =
- Ch.query(
- conn,
+ parameterize_query(
+ ctx,
"insert into nullable format RowBinary",
<<1, 2, 3, 4, 5>>,
encode: false
)
assert %{num_rows: 1, rows: [[count]]} =
- Ch.query!(conn, "select count(*) from nullable where n is null")
+ parameterize_query!(ctx, "select count(*) from nullable where n is null")
assert count == 2 + 5
end
- test "nullable + default", %{conn: conn} do
- Ch.query!(conn, """
+ test "nullable + default", ctx do
+ parameterize_query!(ctx, """
CREATE TABLE ch_nulls (
a UInt8,
b UInt8 NULL,
@@ -1232,28 +1365,32 @@ defmodule Ch.ConnectionTest do
) ENGINE Memory
""")
- Ch.query!(
- conn,
+ on_exit(fn -> Ch.Test.query("DROP TABLE ch_nulls") end)
+
+ parameterize_query!(
+ ctx,
"INSERT INTO ch_nulls(a, b, c, d) FORMAT RowBinary",
[[nil, nil, nil, nil]],
types: ["UInt8", "Nullable(UInt8)", "UInt8", "Nullable(UInt8)"]
)
# default is ignored...
- assert Ch.query!(conn, "SELECT * FROM ch_nulls").rows == [[0, nil, 0, nil]]
+ assert parameterize_query!(ctx, "SELECT * FROM ch_nulls").rows == [[0, nil, 0, nil]]
end
# based on https://github.com/ClickHouse/clickhouse-java/pull/1345/files
- test "nullable + input() + default", %{conn: conn} do
- Ch.query!(conn, """
+ test "nullable + input() + default", ctx do
+ parameterize_query!(ctx, """
CREATE TABLE test_insert_default_value(
n Int32,
s String DEFAULT 'secret'
) ENGINE Memory
""")
- Ch.query!(
- conn,
+ on_exit(fn -> Ch.Test.query("DROP TABLE test_insert_default_value") end)
+
+ parameterize_query!(
+ ctx,
"""
INSERT INTO test_insert_default_value
SELECT id, name
@@ -1264,38 +1401,44 @@ defmodule Ch.ConnectionTest do
types: ["UInt32", "Nullable(String)"]
)
- assert Ch.query!(conn, "SELECT * FROM test_insert_default_value ORDER BY n").rows ==
+ assert parameterize_query!(ctx, "SELECT * FROM test_insert_default_value ORDER BY n").rows ==
[
[-1, "secret"],
[1, "secret"]
]
end
- test "can decode casted Point", %{conn: conn} do
- assert Ch.query!(conn, "select cast((0, 1) as Point)").rows == [
+ test "can decode casted Point", ctx do
+ assert parameterize_query!(ctx, "select cast((0, 1) as Point)").rows == [
_row = [_point = {0.0, 1.0}]
]
end
- test "can encode and then decode Point in query params", %{conn: conn} do
- assert Ch.query!(conn, "select {$0:Point}", [{10, 10}]).rows == [
+ test "can encode and then decode Point in query params", ctx do
+ assert parameterize_query!(ctx, "select {$0:Point}", [{10, 10}]).rows == [
_row = [_point = {10.0, 10.0}]
]
end
- test "can insert and select Point", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE geo_point (p Point) ENGINE = Memory()")
- Ch.query!(conn, "INSERT INTO geo_point VALUES((10, 10))")
- Ch.query!(conn, "INSERT INTO geo_point FORMAT RowBinary", [[{20, 20}]], types: ["Point"])
+ test "can insert and select Point", ctx do
+ parameterize_query!(ctx, "CREATE TABLE geo_point (p Point) ENGINE = Memory()")
+ on_exit(fn -> Ch.Test.query("DROP TABLE geo_point") end)
- assert Ch.query!(conn, "SELECT p, toTypeName(p) FROM geo_point ORDER BY p ASC").rows == [
- [{10.0, 10.0}, "Point"],
- [{20.0, 20.0}, "Point"]
- ]
+ parameterize_query!(ctx, "INSERT INTO geo_point VALUES((10, 10))")
+
+ parameterize_query!(ctx, "INSERT INTO geo_point FORMAT RowBinary", [[{20, 20}]],
+ types: ["Point"]
+ )
+
+ assert parameterize_query!(ctx, "SELECT p, toTypeName(p) FROM geo_point ORDER BY p ASC").rows ==
+ [
+ [{10.0, 10.0}, "Point"],
+ [{20.0, 20.0}, "Point"]
+ ]
# to make our RowBinary is not garbage in garbage out we also test a text format response
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT p, toTypeName(p) FROM geo_point ORDER BY p ASC FORMAT JSONCompact"
).rows
|> Jason.decode!()
@@ -1305,31 +1448,40 @@ defmodule Ch.ConnectionTest do
]
end
- test "can decode casted Ring", %{conn: conn} do
+ test "can decode casted Ring", ctx do
ring = [{0.0, 1.0}, {10.0, 3.0}]
- assert Ch.query!(conn, "select cast([(0,1),(10,3)] as Ring)").rows == [_row = [ring]]
+
+ assert parameterize_query!(ctx, "select cast([(0,1),(10,3)] as Ring)").rows == [
+ _row = [ring]
+ ]
end
- test "can encode and then decode Ring in query params", %{conn: conn} do
+ test "can encode and then decode Ring in query params", ctx do
ring = [{0.0, 1.0}, {10.0, 3.0}]
- assert Ch.query!(conn, "select {$0:Ring}", [ring]).rows == [_row = [ring]]
+ assert parameterize_query!(ctx, "select {$0:Ring}", [ring]).rows == [_row = [ring]]
end
- test "can insert and select Ring", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE geo_ring (r Ring) ENGINE = Memory()")
- Ch.query!(conn, "INSERT INTO geo_ring VALUES([(0, 0), (10, 0), (10, 10), (0, 10)])")
+ test "can insert and select Ring", ctx do
+ parameterize_query!(ctx, "CREATE TABLE geo_ring (r Ring) ENGINE = Memory()")
+ on_exit(fn -> Ch.Test.query("DROP TABLE geo_ring") end)
+
+ parameterize_query!(
+ ctx,
+ "INSERT INTO geo_ring VALUES([(0, 0), (10, 0), (10, 10), (0, 10)])"
+ )
ring = [{20, 20}, {0, 0}, {0, 20}]
- Ch.query!(conn, "INSERT INTO geo_ring FORMAT RowBinary", [[ring]], types: ["Ring"])
+ parameterize_query!(ctx, "INSERT INTO geo_ring FORMAT RowBinary", [[ring]], types: ["Ring"])
- assert Ch.query!(conn, "SELECT r, toTypeName(r) FROM geo_ring ORDER BY r ASC").rows == [
- [[{0.0, 0.0}, {10.0, 0.0}, {10.0, 10.0}, {0.0, 10.0}], "Ring"],
- [[{20.0, 20.0}, {0.0, 0.0}, {0.0, 20.0}], "Ring"]
- ]
+ assert parameterize_query!(ctx, "SELECT r, toTypeName(r) FROM geo_ring ORDER BY r ASC").rows ==
+ [
+ [[{0.0, 0.0}, {10.0, 0.0}, {10.0, 10.0}, {0.0, 10.0}], "Ring"],
+ [[{20.0, 20.0}, {0.0, 0.0}, {0.0, 20.0}], "Ring"]
+ ]
# to make our RowBinary is not garbage in garbage out we also test a text format response
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT r, toTypeName(r) FROM geo_ring ORDER BY r ASC FORMAT JSONCompact"
).rows
|> Jason.decode!()
@@ -1339,31 +1491,39 @@ defmodule Ch.ConnectionTest do
]
end
- test "can decode casted Polygon", %{conn: conn} do
+ test "can decode casted Polygon", ctx do
polygon = [[{0.0, 1.0}, {10.0, 3.0}], [], [{2, 2}]]
- assert Ch.query!(conn, "select cast([[(0,1),(10,3)],[],[(2,2)]] as Polygon)").rows == [
- _row = [polygon]
- ]
+ assert parameterize_query!(ctx, "select cast([[(0,1),(10,3)],[],[(2,2)]] as Polygon)").rows ==
+ [
+ _row = [polygon]
+ ]
end
- test "can encode and then decode Polygon in query params", %{conn: conn} do
+ test "can encode and then decode Polygon in query params", ctx do
polygon = [[{0.0, 1.0}, {10.0, 3.0}], [], [{2, 2}]]
- assert Ch.query!(conn, "select {$0:Polygon}", [polygon]).rows == [_row = [polygon]]
+ assert parameterize_query!(ctx, "select {$0:Polygon}", [polygon]).rows == [_row = [polygon]]
end
- test "can insert and select Polygon", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE geo_polygon (pg Polygon) ENGINE = Memory()")
+ test "can insert and select Polygon", ctx do
+ parameterize_query!(ctx, "CREATE TABLE geo_polygon (pg Polygon) ENGINE = Memory()")
+ on_exit(fn -> Ch.Test.query("DROP TABLE geo_polygon") end)
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO geo_polygon VALUES([[(20, 20), (50, 20), (50, 50), (20, 50)], [(30, 30), (50, 50), (50, 30)]])"
)
polygon = [[{0, 1.0}, {10, 3.2}], [], [{2, 2}]]
- Ch.query!(conn, "INSERT INTO geo_polygon FORMAT RowBinary", [[polygon]], types: ["Polygon"])
- assert Ch.query!(conn, "SELECT pg, toTypeName(pg) FROM geo_polygon ORDER BY pg ASC").rows ==
+ parameterize_query!(ctx, "INSERT INTO geo_polygon FORMAT RowBinary", [[polygon]],
+ types: ["Polygon"]
+ )
+
+ assert parameterize_query!(
+ ctx,
+ "SELECT pg, toTypeName(pg) FROM geo_polygon ORDER BY pg ASC"
+ ).rows ==
[
[[[{0.0, 1.0}, {10.0, 3.2}], [], [{2.0, 2.0}]], "Polygon"],
[
@@ -1376,8 +1536,8 @@ defmodule Ch.ConnectionTest do
]
# to make sure our RowBinary is not garbage in garbage out we also test a text format response
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT pg, toTypeName(pg) FROM geo_polygon ORDER BY pg ASC FORMAT JSONCompact"
).rows
|> Jason.decode!()
@@ -1390,40 +1550,48 @@ defmodule Ch.ConnectionTest do
]
end
- test "can decode casted MultiPolygon", %{conn: conn} do
+ test "can decode casted MultiPolygon", ctx do
multipolygon = [[[{0.0, 1.0}, {10.0, 3.0}], [], [{2, 2}]], [], [[{3, 3}]]]
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"select cast([[[(0,1),(10,3)],[],[(2,2)]],[],[[(3, 3)]]] as MultiPolygon)"
).rows == [
_row = [multipolygon]
]
end
- test "can encode and then decode MultiPolygon in query params", %{conn: conn} do
+ test "can encode and then decode MultiPolygon in query params", ctx do
multipolygon = [[[{0.0, 1.0}, {10.0, 3.0}], [], [{2, 2}]], [], [[{3, 3}]]]
- assert Ch.query!(conn, "select {$0:MultiPolygon}", [multipolygon]).rows == [
+ assert parameterize_query!(ctx, "select {$0:MultiPolygon}", [multipolygon]).rows == [
_row = [multipolygon]
]
end
- test "can insert and select MultiPolygon", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE geo_multipolygon (mpg MultiPolygon) ENGINE = Memory()")
+ test "can insert and select MultiPolygon", ctx do
+ parameterize_query!(
+ ctx,
+ "CREATE TABLE geo_multipolygon (mpg MultiPolygon) ENGINE = Memory()"
+ )
+
+ on_exit(fn -> Ch.Test.query("DROP TABLE geo_multipolygon") end)
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO geo_multipolygon VALUES([[[(0, 0), (10, 0), (10, 10), (0, 10)]], [[(20, 20), (50, 20), (50, 50), (20, 50)],[(30, 30), (50, 50), (50, 30)]]])"
)
multipolygon = [[[{0.0, 1.0}, {10.0, 3.0}], [], [{2, 2}]], [], [[{3, 3}]]]
- Ch.query!(conn, "INSERT INTO geo_multipolygon FORMAT RowBinary", [[multipolygon]],
+ parameterize_query!(ctx, "INSERT INTO geo_multipolygon FORMAT RowBinary", [[multipolygon]],
types: ["MultiPolygon"]
)
- assert Ch.query!(conn, "SELECT mpg, toTypeName(mpg) FROM geo_multipolygon ORDER BY mpg ASC").rows ==
+ assert parameterize_query!(
+ ctx,
+ "SELECT mpg, toTypeName(mpg) FROM geo_multipolygon ORDER BY mpg ASC"
+ ).rows ==
[
_row = [
_multipolygon = [
@@ -1454,8 +1622,8 @@ defmodule Ch.ConnectionTest do
]
# to make sure our RowBinary is not garbage in garbage out we also test a text format response
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT mpg, toTypeName(mpg) FROM geo_multipolygon ORDER BY mpg ASC FORMAT JSONCompact"
).rows
|> Jason.decode!()
@@ -1475,16 +1643,16 @@ defmodule Ch.ConnectionTest do
describe "options" do
# this test is flaky, sometimes it raises due to ownership timeout
@tag capture_log: true, skip: true
- test "can provide custom timeout", %{conn: conn} do
+ test "can provide custom timeout", ctx do
assert {:error, %Mint.TransportError{reason: :timeout} = error} =
- Ch.query(conn, "select sleep(1)", _params = [], timeout: 100)
+ parameterize_query(ctx, "select sleep(1)", _params = [], timeout: 100)
assert Exception.message(error) == "timeout"
end
- test "errors on invalid creds", %{conn: conn} do
+ test "errors on invalid creds", ctx do
assert {:error, %Ch.Error{code: 516} = error} =
- Ch.query(conn, "select 1 + 1", _params = [],
+ parameterize_query(ctx, "select 1 + 1", _params = [],
username: "no-exists",
password: "wrong"
)
@@ -1493,42 +1661,43 @@ defmodule Ch.ConnectionTest do
"Code: 516. DB::Exception: no-exists: Authentication failed: password is incorrect, or there is no user with such name. (AUTHENTICATION_FAILED)"
end
- test "errors on invalid database", %{conn: conn} do
+ test "errors on invalid database", ctx do
assert {:error, %Ch.Error{code: 81} = error} =
- Ch.query(conn, "select 1 + 1", _params = [], database: "no-db")
+ parameterize_query(ctx, "select 1 + 1", _params = [], database: "no-db")
assert Exception.message(error) =~ "`no-db`"
assert Exception.message(error) =~ "UNKNOWN_DATABASE"
end
- test "can provide custom database", %{conn: conn} do
+ test "can provide custom database", ctx do
assert {:ok, %{num_rows: 1, rows: [[2]]}} =
- Ch.query(conn, "select 1 + 1", [], database: "default")
+ parameterize_query(ctx, "select 1 + 1", [], database: "default")
end
end
describe "transactions" do
- test "commit", %{conn: conn} do
- DBConnection.transaction(conn, fn conn ->
- Ch.query!(conn, "select 1 + 1")
+ test "commit", ctx do
+ DBConnection.transaction(ctx.conn, fn conn ->
+ ctx = Map.put(ctx, :conn, conn)
+ parameterize_query!(ctx, "select 1 + 1")
end)
end
- test "rollback", %{conn: conn} do
- DBConnection.transaction(conn, fn conn ->
+ test "rollback", ctx do
+ DBConnection.transaction(ctx.conn, fn conn ->
DBConnection.rollback(conn, :some_reason)
end)
end
- test "status", %{conn: conn} do
- assert DBConnection.status(conn) == :idle
+ test "status", ctx do
+ assert DBConnection.status(ctx.conn) == :idle
end
end
describe "stream" do
- test "emits result structs containing raw data", %{conn: conn} do
+ test "emits result structs containing raw data", ctx do
results =
- DBConnection.run(conn, fn conn ->
+ DBConnection.run(ctx.conn, fn conn ->
conn
|> Ch.stream(
"select number from system.numbers limit {limit:UInt64}",
@@ -1546,14 +1715,14 @@ defmodule Ch.ConnectionTest do
|> RowBinary.decode_rows() == Enum.map(0..9999, &[&1])
end
- test "disconnects on early halt", %{conn: conn} do
+ test "disconnects on early halt", ctx do
logs =
ExUnit.CaptureLog.capture_log(fn ->
- Ch.run(conn, fn conn ->
+ Ch.run(ctx.conn, fn conn ->
conn |> Ch.stream("select number from system.numbers") |> Enum.take(1)
end)
- assert Ch.query!(conn, "select 1 + 1").rows == [[2]]
+ assert parameterize_query!(ctx, "select 1 + 1").rows == [[2]]
end)
assert logs =~
@@ -1562,37 +1731,36 @@ defmodule Ch.ConnectionTest do
end
describe "prepare" do
- test "no-op", %{conn: conn} do
+ test "no-op", ctx do
query = Ch.Query.build("select 1 + 1")
assert {:error, %Ch.Error{message: "prepared statements are not supported"}} =
- DBConnection.prepare(conn, query)
+ DBConnection.prepare(ctx.conn, query)
end
end
describe "start_link/1" do
- test "can pass options to start_link/1" do
+ test "can pass options to start_link/1", ctx do
db = "#{Ch.Test.database()}_#{System.unique_integer([:positive])}"
Ch.Test.query("CREATE DATABASE {db:Identifier}", %{"db" => db})
-
- on_exit(fn ->
- Ch.Test.query("DROP DATABASE {db:Identifier}", %{"db" => db})
- end)
+ on_exit(fn -> Ch.Test.query("DROP DATABASE {db:Identifier}", %{"db" => db}) end)
{:ok, conn} = Ch.start_link(database: db)
- Ch.query!(conn, "create table example(a UInt8) engine=Memory")
- assert {:ok, %{rows: [["example"]]}} = Ch.query(conn, "show tables")
+ ctx = Map.put(ctx, :conn, conn)
+ parameterize_query!(ctx, "create table example(a UInt8) engine=Memory")
+ assert {:ok, %{rows: [["example"]]}} = parameterize_query(ctx, "show tables")
end
- test "can start without options" do
+ test "can start without options", ctx do
{:ok, conn} = Ch.start_link()
- assert {:ok, %{num_rows: 1, rows: [[2]]}} = Ch.query(conn, "select 1 + 1")
+ ctx = Map.put(ctx, :conn, conn)
+ assert {:ok, %{num_rows: 1, rows: [[2]]}} = parameterize_query(ctx, "select 1 + 1")
end
end
describe "RowBinaryWithNamesAndTypes" do
- setup %{conn: conn} do
- Ch.query!(conn, """
+ setup ctx do
+ parameterize_query!(ctx, """
create table if not exists row_binary_names_and_types_t (
country_code FixedString(2),
rare_string LowCardinality(String),
@@ -1600,12 +1768,10 @@ defmodule Ch.ConnectionTest do
) engine Memory
""")
- on_exit(fn ->
- Ch.Test.query("truncate row_binary_names_and_types_t", [], database: Ch.Test.database())
- end)
+ on_exit(fn -> Ch.Test.query("truncate row_binary_names_and_types_t") end)
end
- test "error on type mismatch", %{conn: conn} do
+ test "error on type mismatch", ctx do
stmt = "insert into row_binary_names_and_types_t format RowBinaryWithNamesAndTypes"
rows = [["AB", "rare", -42]]
names = ["country_code", "rare_string", "maybe_int32"]
@@ -1615,7 +1781,9 @@ defmodule Ch.ConnectionTest do
types: [Ch.Types.fixed_string(2), Ch.Types.string(), Ch.Types.nullable(Ch.Types.u32())]
]
- assert {:error, %Ch.Error{code: 117, message: message}} = Ch.query(conn, stmt, rows, opts)
+ assert {:error, %Ch.Error{code: 117, message: message}} =
+ parameterize_query(ctx, stmt, rows, opts)
+
assert message =~ "Type of 'rare_string' must be LowCardinality(String), not String"
opts = [
@@ -1627,11 +1795,13 @@ defmodule Ch.ConnectionTest do
]
]
- assert {:error, %Ch.Error{code: 117, message: message}} = Ch.query(conn, stmt, rows, opts)
+ assert {:error, %Ch.Error{code: 117, message: message}} =
+ parameterize_query(ctx, stmt, rows, opts)
+
assert message =~ "Type of 'maybe_int32' must be Nullable(Int32), not Nullable(UInt32)"
end
- test "ok on valid types", %{conn: conn} do
+ test "ok on valid types", ctx do
stmt = "insert into row_binary_names_and_types_t format RowBinaryWithNamesAndTypes"
rows = [["AB", "rare", -42]]
names = ["country_code", "rare_string", "maybe_int32"]
@@ -1645,14 +1815,14 @@ defmodule Ch.ConnectionTest do
]
]
- assert {:ok, %{num_rows: 1}} = Ch.query(conn, stmt, rows, opts)
+ assert {:ok, %{num_rows: 1}} = parameterize_query(ctx, stmt, rows, opts)
end
- test "select with lots of columns", %{conn: conn} do
+ test "select with lots of columns", ctx do
select = Enum.map_join(1..1000, ", ", fn i -> "#{i} as col_#{i}" end)
stmt = "select #{select} format RowBinaryWithNamesAndTypes"
- assert %Ch.Result{columns: columns, rows: [row]} = Ch.query!(conn, stmt)
+ assert %Ch.Result{columns: columns, rows: [row]} = parameterize_query!(ctx, stmt)
assert length(columns) == 1000
assert List.first(columns) == "col_1"
diff --git a/test/ch/dynamic_test.exs b/test/ch/dynamic_test.exs
index 256bc52..7a916d6 100644
--- a/test/ch/dynamic_test.exs
+++ b/test/ch/dynamic_test.exs
@@ -1,5 +1,6 @@
defmodule Ch.DynamicTest do
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
+ import Ch.Test, only: [parameterize_query!: 2, parameterize_query!: 3, parameterize_query!: 4]
@moduletag :dynamic
@@ -7,24 +8,24 @@ defmodule Ch.DynamicTest do
{:ok, conn: start_supervised!({Ch, database: Ch.Test.database()})}
end
- test "it works", %{conn: conn} do
+ test "it works", ctx do
select = fn literal ->
- [row] = Ch.query!(conn, "select #{literal}::Dynamic as d, dynamicType(d)").rows
+ [row] = parameterize_query!(ctx, "select #{literal}::Dynamic as d, dynamicType(d)").rows
row
end
- Ch.query!(conn, "CREATE TABLE test (d Dynamic, id String) ENGINE = Memory;")
- on_exit(fn -> Ch.Test.query("DROP TABLE test", [], database: Ch.Test.database()) end)
+ parameterize_query!(ctx, "CREATE TABLE test (d Dynamic, id String) ENGINE = Memory;")
+ on_exit(fn -> Ch.Test.query("DROP TABLE test") end)
insert = fn value ->
id = inspect(value)
- Ch.query!(conn, "insert into test(d, id) format RowBinary", [[value, id]],
+ parameterize_query!(ctx, "insert into test(d, id) format RowBinary", [[value, id]],
types: ["Dynamic", "String"]
).rows
[[inserted]] =
- Ch.query!(conn, "select d from test where id = {id:String}", %{"id" => id}).rows
+ parameterize_query!(ctx, "select d from test where id = {id:String}", %{"id" => id}).rows
inserted
end
@@ -96,7 +97,7 @@ defmodule Ch.DynamicTest do
# DateTime 0x11
assert select.("'2020-01-01 12:34:56'::DateTime") == [
- Ch.Test.to_clickhouse_naive(conn, ~N[2020-01-01 12:34:56]),
+ Ch.Test.to_clickhouse_naive(ctx.conn, ~N[2020-01-01 12:34:56]),
"DateTime"
]
@@ -110,7 +111,10 @@ defmodule Ch.DynamicTest do
# DateTime64(P) 0x13
assert select.("'2020-01-01 12:34:56.123456'::DateTime64(6)") ==
- [Ch.Test.to_clickhouse_naive(conn, ~N[2020-01-01 12:34:56.123456]), "DateTime64(6)"]
+ [
+ Ch.Test.to_clickhouse_naive(ctx.conn, ~N[2020-01-01 12:34:56.123456]),
+ "DateTime64(6)"
+ ]
# DateTime64(P, time_zone) 0x14
assert [dt64, "DateTime64(6, 'Europe/Prague')"] =
@@ -266,13 +270,17 @@ defmodule Ch.DynamicTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/dynamic#creating-dynamic
- test "creating dynamic", %{conn: conn} do
+ test "creating dynamic", ctx do
# Using Dynamic type in table column definition:
- Ch.query!(conn, "CREATE TABLE test (d Dynamic) ENGINE = Memory;")
- on_exit(fn -> Ch.Test.query("DROP TABLE test", [], database: Ch.Test.database()) end)
- Ch.query!(conn, "INSERT INTO test VALUES (NULL), (42), ('Hello, World!'), ([1, 2, 3]);")
+ parameterize_query!(ctx, "CREATE TABLE test (d Dynamic) ENGINE = Memory;")
+ on_exit(fn -> Ch.Test.query("DROP TABLE test") end)
+
+ parameterize_query!(
+ ctx,
+ "INSERT INTO test VALUES (NULL), (42), ('Hello, World!'), ([1, 2, 3]);"
+ )
- assert Ch.query!(conn, "SELECT d, dynamicType(d) FROM test;").rows == [
+ assert parameterize_query!(ctx, "SELECT d, dynamicType(d) FROM test;").rows == [
[nil, "None"],
[42, "Int64"],
["Hello, World!", "String"],
@@ -280,13 +288,14 @@ defmodule Ch.DynamicTest do
]
# Using CAST from ordinary column:
- assert Ch.query!(conn, "SELECT 'Hello, World!'::Dynamic AS d, dynamicType(d);").rows == [
- ["Hello, World!", "String"]
- ]
+ assert parameterize_query!(ctx, "SELECT 'Hello, World!'::Dynamic AS d, dynamicType(d);").rows ==
+ [
+ ["Hello, World!", "String"]
+ ]
# Using CAST from Variant column:
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT multiIf((number % 3) = 0, number, (number % 3) = 1, range(number + 1), NULL)::Dynamic AS d, dynamicType(d) FROM numbers(3)",
[],
settings: [
@@ -301,13 +310,17 @@ defmodule Ch.DynamicTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/dynamic#reading-dynamic-nested-types-as-subcolumns
- test "reading dynamic nested types as subcolumns", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE test (d Dynamic) ENGINE = Memory;")
- on_exit(fn -> Ch.Test.query("DROP TABLE test", [], database: Ch.Test.database()) end)
- Ch.query!(conn, "INSERT INTO test VALUES (NULL), (42), ('Hello, World!'), ([1, 2, 3]);")
+ test "reading dynamic nested types as subcolumns", ctx do
+ parameterize_query!(ctx, "CREATE TABLE test (d Dynamic) ENGINE = Memory;")
+ on_exit(fn -> Ch.Test.query("DROP TABLE test") end)
+
+ parameterize_query!(
+ ctx,
+ "INSERT INTO test VALUES (NULL), (42), ('Hello, World!'), ([1, 2, 3]);"
+ )
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT d, dynamicType(d), d.String, d.Int64, d.`Array(Int64)`, d.Date, d.`Array(String)` FROM test;"
).rows == [
[nil, "None", nil, nil, [], nil, []],
@@ -316,8 +329,8 @@ defmodule Ch.DynamicTest do
[[1, 2, 3], "Array(Int64)", nil, nil, [1, 2, 3], nil, []]
]
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT toTypeName(d.String), toTypeName(d.Int64), toTypeName(d.`Array(Int64)`), toTypeName(d.Date), toTypeName(d.`Array(String)`) FROM test LIMIT 1;"
).rows == [
[
@@ -329,8 +342,8 @@ defmodule Ch.DynamicTest do
]
]
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT d, dynamicType(d), dynamicElement(d, 'String'), dynamicElement(d, 'Int64'), dynamicElement(d, 'Array(Int64)'), dynamicElement(d, 'Date'), dynamicElement(d, 'Array(String)') FROM test;"
).rows == [
[nil, "None", nil, nil, [], nil, []],
@@ -341,9 +354,9 @@ defmodule Ch.DynamicTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/dynamic#converting-a-string-column-to-a-dynamic-column-through-parsing
- test "converting a string column to a dynamic column through parsing", %{conn: conn} do
- assert Ch.query!(
- conn,
+ test "converting a string column to a dynamic column through parsing", ctx do
+ assert parameterize_query!(
+ ctx,
"SELECT CAST(materialize(map('key1', '42', 'key2', 'true', 'key3', '2020-01-01')), 'Map(String, Dynamic)') as map_of_dynamic, mapApply((k, v) -> (k, dynamicType(v)), map_of_dynamic) as map_of_dynamic_types;",
[],
settings: [cast_string_to_dynamic_use_inference: 1]
@@ -356,12 +369,12 @@ defmodule Ch.DynamicTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/dynamic#converting-a-dynamic-column-to-an-ordinary-column
- test "converting a dynamic column to an ordinary column", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE test (d Dynamic) ENGINE = Memory;")
- on_exit(fn -> Ch.Test.query("DROP TABLE test", [], database: Ch.Test.database()) end)
- Ch.query!(conn, "INSERT INTO test VALUES (NULL), (42), ('42.42'), (true), ('e10');")
+ test "converting a dynamic column to an ordinary column", ctx do
+ parameterize_query!(ctx, "CREATE TABLE test (d Dynamic) ENGINE = Memory;")
+ on_exit(fn -> Ch.Test.query("DROP TABLE test") end)
+ parameterize_query!(ctx, "INSERT INTO test VALUES (NULL), (42), ('42.42'), (true), ('e10');")
- assert Ch.query!(conn, "SELECT d::Nullable(Float64) FROM test;").rows == [
+ assert parameterize_query!(ctx, "SELECT d::Nullable(Float64) FROM test;").rows == [
[nil],
[42.0],
[42.42],
@@ -371,16 +384,16 @@ defmodule Ch.DynamicTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/dynamic#converting-a-variant-column-to-dynamic-column
- test "converting a variant column to dynamic column", %{conn: conn} do
- Ch.query!(
- conn,
+ test "converting a variant column to dynamic column", ctx do
+ parameterize_query!(
+ ctx,
"CREATE TABLE test (v Variant(UInt64, String, Array(UInt64))) ENGINE = Memory;"
)
- on_exit(fn -> Ch.Test.query("DROP TABLE test", [], database: Ch.Test.database()) end)
- Ch.query!(conn, "INSERT INTO test VALUES (NULL), (42), ('String'), ([1, 2, 3]);")
+ on_exit(fn -> Ch.Test.query("DROP TABLE test") end)
+ parameterize_query!(ctx, "INSERT INTO test VALUES (NULL), (42), ('String'), ([1, 2, 3]);")
- assert Ch.query!(conn, "SELECT v::Dynamic AS d, dynamicType(d) FROM test;").rows == [
+ assert parameterize_query!(ctx, "SELECT v::Dynamic AS d, dynamicType(d) FROM test;").rows == [
[nil, "None"],
[42, "UInt64"],
["String", "String"],
@@ -389,12 +402,19 @@ defmodule Ch.DynamicTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/dynamic#converting-a-dynamicmax_typesn-column-to-another-dynamicmax_typesk
- test "converting a Dynamic(max_types=N) column to another Dynamic(max_types=K)", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE test (d Dynamic(max_types=4)) ENGINE = Memory;")
- on_exit(fn -> Ch.Test.query("DROP TABLE test", [], database: Ch.Test.database()) end)
- Ch.query!(conn, "INSERT INTO test VALUES (NULL), (42), (43), ('42.42'), (true), ([1, 2, 3]);")
+ test "converting a Dynamic(max_types=N) column to another Dynamic(max_types=K)", ctx do
+ parameterize_query!(ctx, "CREATE TABLE test (d Dynamic(max_types=4)) ENGINE = Memory;")
+ on_exit(fn -> Ch.Test.query("DROP TABLE test") end)
+
+ parameterize_query!(
+ ctx,
+ "INSERT INTO test VALUES (NULL), (42), (43), ('42.42'), (true), ([1, 2, 3]);"
+ )
- assert Ch.query!(conn, "SELECT d::Dynamic(max_types=5) as d2, dynamicType(d2) FROM test;").rows ==
+ assert parameterize_query!(
+ ctx,
+ "SELECT d::Dynamic(max_types=5) as d2, dynamicType(d2) FROM test;"
+ ).rows ==
[
[nil, "None"],
[42, "Int64"],
@@ -404,8 +424,8 @@ defmodule Ch.DynamicTest do
[[1, 2, 3], "Array(Int64)"]
]
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT d, dynamicType(d), d::Dynamic(max_types=2) as d2, dynamicType(d2), isDynamicElementInSharedData(d2) FROM test;"
).rows == [
[nil, "None", nil, "None", false],
diff --git a/test/ch/faults_test.exs b/test/ch/faults_test.exs
index 9a41f4e..dae3b7d 100644
--- a/test/ch/faults_test.exs
+++ b/test/ch/faults_test.exs
@@ -1,6 +1,6 @@
defmodule Ch.FaultsTest do
alias Ch.Result
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
import Ch.Test, only: [intercept_packets: 1]
defp capture_async_log(f) do
@@ -18,9 +18,13 @@ defmodule Ch.FaultsTest do
{:ok, clickhouse: clickhouse, listen: listen, port: port}
end
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+
describe "connect/1" do
test "reconnects to eventually reachable server", ctx do
- %{listen: listen, port: port, clickhouse: clickhouse} = ctx
+ %{listen: listen, port: port, clickhouse: clickhouse, query_options: query_options} = ctx
# make the server unreachable
:ok = :gen_tcp.close(listen)
@@ -31,7 +35,7 @@ defmodule Ch.FaultsTest do
log =
capture_async_log(fn ->
assert {:error, %DBConnection.ConnectionError{reason: :queue_timeout}} =
- Ch.query(conn, "select 1 + 1")
+ Ch.query(conn, "select 1 + 1", [], query_options)
# make the server reachable
{:ok, listen} = :gen_tcp.listen(port, @socket_opts)
@@ -42,7 +46,9 @@ defmodule Ch.FaultsTest do
:ok = :gen_tcp.send(mint, intercept_packets(clickhouse))
spawn_link(fn ->
- assert {:ok, %{num_rows: 1, rows: [[2]]}} = Ch.query(conn, "select 1 + 1")
+ assert {:ok, %{num_rows: 1, rows: [[2]]}} =
+ Ch.query(conn, "select 1 + 1", [], query_options)
+
send(test, :done)
end)
@@ -252,7 +258,12 @@ defmodule Ch.FaultsTest do
end
describe "query" do
- test "reconnects after timeout", %{port: port, listen: listen, clickhouse: clickhouse} do
+ test "reconnects after timeout", %{
+ port: port,
+ listen: listen,
+ clickhouse: clickhouse,
+ query_options: query_options
+ } do
test = self()
log =
@@ -268,7 +279,7 @@ defmodule Ch.FaultsTest do
spawn_link(fn ->
assert {:error, %Mint.TransportError{reason: :timeout}} =
- Ch.query(conn, "select 1 + 1")
+ Ch.query(conn, "select 1 + 1", [], query_options)
end)
# failed select 1 + 1
@@ -283,7 +294,9 @@ defmodule Ch.FaultsTest do
:ok = :gen_tcp.send(mint, intercept_packets(clickhouse))
spawn_link(fn ->
- assert {:ok, %{num_rows: 1, rows: [[2]]}} = Ch.query(conn, "select 1 + 1")
+ assert {:ok, %{num_rows: 1, rows: [[2]]}} =
+ Ch.query(conn, "select 1 + 1", [], query_options)
+
send(test, :done)
end)
@@ -298,7 +311,7 @@ defmodule Ch.FaultsTest do
end
test "reconnects after closed on response", ctx do
- %{port: port, listen: listen, clickhouse: clickhouse} = ctx
+ %{port: port, listen: listen, clickhouse: clickhouse, query_options: query_options} = ctx
test = self()
log =
@@ -314,7 +327,7 @@ defmodule Ch.FaultsTest do
spawn_link(fn ->
assert {:error, %Mint.TransportError{reason: :closed}} =
- Ch.query(conn, "select 1 + 1")
+ Ch.query(conn, "select 1 + 1", [], query_options)
end)
# failed select 1 + 1
@@ -330,7 +343,9 @@ defmodule Ch.FaultsTest do
:ok = :gen_tcp.send(mint, intercept_packets(clickhouse))
spawn_link(fn ->
- assert {:ok, %{num_rows: 1, rows: [[2]]}} = Ch.query(conn, "select 1 + 1")
+ assert {:ok, %{num_rows: 1, rows: [[2]]}} =
+ Ch.query(conn, "select 1 + 1", [], query_options)
+
send(test, :done)
end)
@@ -344,7 +359,7 @@ defmodule Ch.FaultsTest do
end
test "reconnects after Connection: close response from server", ctx do
- %{port: port, listen: listen, clickhouse: clickhouse} = ctx
+ %{port: port, listen: listen, clickhouse: clickhouse, query_options: query_options} = ctx
test = self()
log =
@@ -359,7 +374,9 @@ defmodule Ch.FaultsTest do
:ok = :gen_tcp.send(mint, intercept_packets(clickhouse))
spawn_link(fn ->
- assert {:ok, %{num_rows: 1, rows: [[2]]}} = Ch.query(conn, "select 1 + 1")
+ assert {:ok, %{num_rows: 1, rows: [[2]]}} =
+ Ch.query(conn, "select 1 + 1", [], query_options)
+
send(test, :done)
end)
@@ -386,7 +403,7 @@ defmodule Ch.FaultsTest do
spawn_link(fn ->
assert {:ok, %{num_rows: 1, rows: [[2]]}} =
- Ch.query(conn, "select 1 + 1")
+ Ch.query(conn, "select 1 + 1", [], query_options)
send(test, :done)
end)
@@ -405,7 +422,7 @@ defmodule Ch.FaultsTest do
# TODO non-chunked request
test "reconnects after closed before streaming request", ctx do
- %{port: port, listen: listen, clickhouse: clickhouse} = ctx
+ %{port: port, listen: listen, clickhouse: clickhouse, query_options: query_options} = ctx
test = self()
rows = [[1, 2], [3, 4]]
@@ -431,7 +448,7 @@ defmodule Ch.FaultsTest do
conn,
"insert into unknown_table(a,b) format RowBinary",
stream,
- encode: false
+ Keyword.merge(query_options, encode: false)
)
end)
@@ -448,7 +465,7 @@ defmodule Ch.FaultsTest do
conn,
"insert into unknown_table(a,b) format RowBinary",
stream,
- encode: false
+ Keyword.merge(query_options, encode: false)
)
assert message =~ ~r/UNKNOWN_TABLE/
@@ -467,7 +484,7 @@ defmodule Ch.FaultsTest do
end
test "reconnects after closed while streaming request", ctx do
- %{port: port, listen: listen, clickhouse: clickhouse} = ctx
+ %{port: port, listen: listen, clickhouse: clickhouse, query_options: query_options} = ctx
test = self()
rows = [[1, 2], [3, 4]]
@@ -490,7 +507,7 @@ defmodule Ch.FaultsTest do
conn,
"insert into unknown_table(a,b) format RowBinary",
stream,
- encode: false
+ Keyword.merge(query_options, encode: false)
)
end)
@@ -511,7 +528,7 @@ defmodule Ch.FaultsTest do
conn,
"insert into unknown_table(a,b) format RowBinary",
stream,
- encode: false
+ Keyword.merge(query_options, encode: false)
)
assert message =~ ~r/UNKNOWN_TABLE/
@@ -530,7 +547,7 @@ defmodule Ch.FaultsTest do
end
test "warns on different server name", ctx do
- %{port: port, listen: listen, clickhouse: clickhouse} = ctx
+ %{port: port, listen: listen, clickhouse: clickhouse, query_options: query_options} = ctx
test = self()
header = "X-ClickHouse-Server-Display-Name"
@@ -549,7 +566,7 @@ defmodule Ch.FaultsTest do
:ok = :gen_tcp.send(mint, intercept_packets(clickhouse))
spawn_link(fn ->
- assert {:ok, %Result{rows: [[1]]}} = Ch.query(conn, "select 1")
+ assert {:ok, %Result{rows: [[1]]}} = Ch.query(conn, "select 1", [], query_options)
send(test, :done)
end)
diff --git a/test/ch/headers_test.exs b/test/ch/headers_test.exs
index 458e3a3..2d3da43 100644
--- a/test/ch/headers_test.exs
+++ b/test/ch/headers_test.exs
@@ -1,17 +1,28 @@
defmodule Ch.HeadersTest do
- use ExUnit.Case, async: true
+ use ExUnit.Case,
+ async: true,
+ parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
setup do
{:ok, conn} = Ch.start_link()
{:ok, conn: conn}
end
- test "can request gzipped response through headers", %{conn: conn} do
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+
+ test "can request gzipped response through headers", %{conn: conn, query_options: query_options} do
assert {:ok, %{rows: data, data: data, headers: headers}} =
- Ch.query(conn, "select number from system.numbers limit 100", [],
- decode: false,
- settings: [enable_http_compression: 1],
- headers: [{"accept-encoding", "gzip"}]
+ Ch.query(
+ conn,
+ "select number from system.numbers limit 100",
+ [],
+ Keyword.merge(query_options,
+ decode: false,
+ settings: [enable_http_compression: 1],
+ headers: [{"accept-encoding", "gzip"}]
+ )
)
assert :proplists.get_value("content-type", headers) == "application/octet-stream"
@@ -22,12 +33,17 @@ defmodule Ch.HeadersTest do
assert <<0x1F, 0x8B, _rest::bytes>> = IO.iodata_to_binary(data)
end
- test "can request lz4 response through headers", %{conn: conn} do
+ test "can request lz4 response through headers", %{conn: conn, query_options: query_options} do
assert {:ok, %{rows: data, data: data, headers: headers}} =
- Ch.query(conn, "select number from system.numbers limit 100", [],
- decode: false,
- settings: [enable_http_compression: 1],
- headers: [{"accept-encoding", "lz4"}]
+ Ch.query(
+ conn,
+ "select number from system.numbers limit 100",
+ [],
+ Keyword.merge(query_options,
+ decode: false,
+ settings: [enable_http_compression: 1],
+ headers: [{"accept-encoding", "lz4"}]
+ )
)
assert :proplists.get_value("content-type", headers) == "application/octet-stream"
@@ -38,12 +54,17 @@ defmodule Ch.HeadersTest do
assert <<0x04, 0x22, 0x4D, 0x18, _rest::bytes>> = IO.iodata_to_binary(data)
end
- test "can request zstd response through headers", %{conn: conn} do
+ test "can request zstd response through headers", %{conn: conn, query_options: query_options} do
assert {:ok, %{rows: data, data: data, headers: headers}} =
- Ch.query(conn, "select number from system.numbers limit 100", [],
- decode: false,
- settings: [enable_http_compression: 1],
- headers: [{"accept-encoding", "zstd"}]
+ Ch.query(
+ conn,
+ "select number from system.numbers limit 100",
+ [],
+ Keyword.merge(query_options,
+ decode: false,
+ settings: [enable_http_compression: 1],
+ headers: [{"accept-encoding", "zstd"}]
+ )
)
assert :proplists.get_value("content-type", headers) == "application/octet-stream"
diff --git a/test/ch/http_test.exs b/test/ch/http_test.exs
index f9517ba..c0f802b 100644
--- a/test/ch/http_test.exs
+++ b/test/ch/http_test.exs
@@ -1,37 +1,53 @@
defmodule Ch.HTTPTest do
- use ExUnit.Case, async: true
+ use ExUnit.Case,
+ async: true,
+ parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
@moduletag :slow
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+
describe "user-agent" do
setup do
{:ok, ch: start_supervised!(Ch)}
end
- test "sets user-agent to ch/ by default", %{ch: ch} do
- %Ch.Result{rows: [[123]], headers: resp_header} = Ch.query!(ch, "select 123")
+ test "sets user-agent to ch/ by default", %{ch: ch, query_options: query_options} do
+ %Ch.Result{rows: [[123]], headers: resp_header} =
+ Ch.query!(ch, "select 123", [], query_options)
+
{"x-clickhouse-query-id", query_id} = List.keyfind!(resp_header, "x-clickhouse-query-id", 0)
- assert query_http_user_agent(ch, query_id) == "ch/" <> Mix.Project.config()[:version]
+
+ assert query_http_user_agent(ch, query_id, query_options) ==
+ "ch/" <> Mix.Project.config()[:version]
end
- test "uses the provided user-agent", %{ch: ch} do
+ test "uses the provided user-agent", %{ch: ch, query_options: query_options} do
req_headers = [{"user-agent", "plausible/0.1.0"}]
%Ch.Result{rows: [[123]], headers: resp_header} =
- Ch.query!(ch, "select 123", _params = [], headers: req_headers)
+ Ch.query!(
+ ch,
+ "select 123",
+ _params = [],
+ Keyword.merge(query_options, headers: req_headers)
+ )
{"x-clickhouse-query-id", query_id} = List.keyfind!(resp_header, "x-clickhouse-query-id", 0)
- assert query_http_user_agent(ch, query_id) == "plausible/0.1.0"
+ assert query_http_user_agent(ch, query_id, query_options) == "plausible/0.1.0"
end
end
- defp query_http_user_agent(ch, query_id) do
+ defp query_http_user_agent(ch, query_id, query_options) do
retry(fn ->
%Ch.Result{rows: [[user_agent]]} =
Ch.query!(
ch,
"select http_user_agent from system.query_log where query_id = {query_id:String} limit 1",
- %{"query_id" => query_id}
+ %{"query_id" => query_id},
+ query_options
)
user_agent
diff --git a/test/ch/json_test.exs b/test/ch/json_test.exs
index e402d8d..b41a82d 100644
--- a/test/ch/json_test.exs
+++ b/test/ch/json_test.exs
@@ -1,19 +1,20 @@
defmodule Ch.JSONTest do
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
@moduletag :json
- setup do
- on_exit(fn ->
- Ch.Test.query("DROP TABLE IF EXISTS json_test", [], database: Ch.Test.database())
- end)
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+ setup do
+ on_exit(fn -> Ch.Test.query("DROP TABLE IF EXISTS json_test") end)
{:ok, conn: start_supervised!({Ch, database: Ch.Test.database()})}
end
- test "simple json", %{conn: conn} do
+ test "simple json", %{conn: conn, query_options: query_options} do
select = fn literal ->
- [[value]] = Ch.query!(conn, "select '#{literal}'::json").rows
+ [[value]] = Ch.query!(conn, "select '#{literal}'::json", [], query_options).rows
value
end
@@ -54,19 +55,31 @@ defmodule Ch.JSONTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/newjson#using-json-in-a-table-column-definition
- test "basic", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE json_test (json JSON, id UInt8) ENGINE = Memory")
+ test "basic", %{conn: conn, query_options: query_options} do
+ Ch.query!(
+ conn,
+ "CREATE TABLE json_test (json JSON, id UInt8) ENGINE = Memory",
+ [],
+ query_options
+ )
- Ch.query!(conn, """
- INSERT INTO json_test VALUES
- ('{"a" : {"b" : 42}, "c" : [1, 2, 3]}', 0),
- ('{"f" : "Hello, World!"}', 1),
- ('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}', 2)
- """)
+ Ch.query!(
+ conn,
+ """
+ INSERT INTO json_test VALUES
+ ('{"a" : {"b" : 42}, "c" : [1, 2, 3]}', 0),
+ ('{"f" : "Hello, World!"}', 1),
+ ('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}', 2)
+ """,
+ [],
+ query_options
+ )
assert Ch.query!(
conn,
- "SELECT json FROM json_test ORDER BY id"
+ "SELECT json FROM json_test ORDER BY id",
+ [],
+ query_options
).rows == [
[%{"a" => %{"b" => 42}, "c" => [1, 2, 3]}],
[%{"f" => "Hello, World!"}],
@@ -77,17 +90,24 @@ defmodule Ch.JSONTest do
conn,
"INSERT INTO json_test(json, id) FORMAT RowBinary",
[[%{"a" => %{"b" => 999}, "some other" => "json value", "from" => "rowbinary"}, 3]],
- types: ["JSON", "UInt8"]
+ Keyword.merge(query_options, types: ["JSON", "UInt8"])
)
assert Ch.query!(
conn,
- "SELECT json FROM json_test where json.from = 'rowbinary'"
+ "SELECT json FROM json_test where json.from = 'rowbinary'",
+ [],
+ query_options
).rows == [
[%{"from" => "rowbinary", "some other" => "json value", "a" => %{"b" => 999}}]
]
- assert Ch.query!(conn, "select json.a.b, json.a.g, json.c, json.d from json_test order by id").rows ==
+ assert Ch.query!(
+ conn,
+ "select json.a.b, json.a.g, json.c, json.d from json_test order by id",
+ [],
+ query_options
+ ).rows ==
[
[42, nil, [1, 2, 3], nil],
[nil, nil, nil, nil],
@@ -97,19 +117,31 @@ defmodule Ch.JSONTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/newjson#using-json-in-a-table-column-definition
- test "with skip (i.e. extra type options)", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE json_test (json JSON(a.b UInt32, SKIP a.e)) ENGINE = Memory;")
+ test "with skip (i.e. extra type options)", %{conn: conn, query_options: query_options} do
+ Ch.query!(
+ conn,
+ "CREATE TABLE json_test (json JSON(a.b UInt32, SKIP a.e)) ENGINE = Memory;",
+ [],
+ query_options
+ )
- Ch.query!(conn, """
- INSERT INTO json_test VALUES
- ('{"a" : {"b" : 42}, "c" : [1, 2, 3]}'),
- ('{"f" : "Hello, World!"}'),
- ('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}');
- """)
+ Ch.query!(
+ conn,
+ """
+ INSERT INTO json_test VALUES
+ ('{"a" : {"b" : 42}, "c" : [1, 2, 3]}'),
+ ('{"f" : "Hello, World!"}'),
+ ('{"a" : {"b" : 43, "e" : 10}, "c" : [4, 5, 6]}');
+ """,
+ [],
+ query_options
+ )
assert Ch.query!(
conn,
- "SELECT json FROM json_test"
+ "SELECT json FROM json_test",
+ [],
+ query_options
).rows == [
[%{"a" => %{"b" => 42}, "c" => [1, 2, 3]}],
[%{"a" => %{"b" => 0}, "f" => "Hello, World!"}],
@@ -118,40 +150,60 @@ defmodule Ch.JSONTest do
end
# https://clickhouse.com/docs/sql-reference/data-types/newjson#reading-json-paths-as-sub-columns
- test "reading json paths as subcolumns", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE json_test (json JSON(a.b UInt32, SKIP a.e)) ENGINE = Memory")
+ test "reading json paths as subcolumns", %{conn: conn, query_options: query_options} do
+ Ch.query!(
+ conn,
+ "CREATE TABLE json_test (json JSON(a.b UInt32, SKIP a.e)) ENGINE = Memory",
+ [],
+ query_options
+ )
- Ch.query!(conn, """
- INSERT INTO json_test VALUES
- ('{"a" : {"b" : 42, "g" : 42.42}, "c" : [1, 2, 3], "d" : "2020-01-01"}'),
- ('{"f" : "Hello, World!", "d" : "2020-01-02"}'),
- ('{"a" : {"b" : 43, "e" : 10, "g" : 43.43}, "c" : [4, 5, 6]}');
- """)
+ Ch.query!(
+ conn,
+ """
+ INSERT INTO json_test VALUES
+ ('{"a" : {"b" : 42, "g" : 42.42}, "c" : [1, 2, 3], "d" : "2020-01-01"}'),
+ ('{"f" : "Hello, World!", "d" : "2020-01-02"}'),
+ ('{"a" : {"b" : 43, "e" : 10, "g" : 43.43}, "c" : [4, 5, 6]}');
+ """,
+ [],
+ query_options
+ )
assert Ch.query!(
conn,
- "SELECT json FROM json_test"
+ "SELECT json FROM json_test",
+ [],
+ query_options
).rows == [
[%{"a" => %{"b" => 42, "g" => 42.42}, "c" => [1, 2, 3], "d" => "2020-01-01"}],
[%{"a" => %{"b" => 0}, "d" => "2020-01-02", "f" => "Hello, World!"}],
[%{"a" => %{"b" => 43, "g" => 43.43}, "c" => [4, 5, 6]}]
]
- assert Ch.query!(conn, "SELECT json.a.b, json.a.g, json.c, json.d FROM json_test").rows == [
+ assert Ch.query!(
+ conn,
+ "SELECT json.a.b, json.a.g, json.c, json.d FROM json_test",
+ [],
+ query_options
+ ).rows == [
[42, 42.42, [1, 2, 3], ~D[2020-01-01]],
[0, nil, nil, ~D[2020-01-02]],
[43, 43.43, [4, 5, 6], nil]
]
- assert Ch.query!(conn, "SELECT json.non.existing.path FROM json_test").rows == [
- [nil],
- [nil],
- [nil]
- ]
+ assert Ch.query!(conn, "SELECT json.non.existing.path FROM json_test", [], query_options).rows ==
+ [
+ [nil],
+ [nil],
+ [nil]
+ ]
assert Ch.query!(
conn,
- "SELECT toTypeName(json.a.b), toTypeName(json.a.g), toTypeName(json.c), toTypeName(json.d) FROM json_test;"
+ "SELECT toTypeName(json.a.b), toTypeName(json.a.g), toTypeName(json.c), toTypeName(json.d) FROM json_test;",
+ [],
+ query_options
).rows == [
["UInt32", "Dynamic", "Dynamic", "Dynamic"],
["UInt32", "Dynamic", "Dynamic", "Dynamic"],
@@ -167,7 +219,9 @@ defmodule Ch.JSONTest do
json.d.:Date,
dynamicType(json.d)
FROM json_test
- """
+ """,
+ [],
+ query_options
).rows == [
[42.42, "Float64", ~D[2020-01-01], "Date"],
[nil, "None", ~D[2020-01-02], "Date"],
@@ -179,7 +233,9 @@ defmodule Ch.JSONTest do
"""
SELECT json.a.g::UInt64 AS uint
FROM json_test;
- """
+ """,
+ [],
+ query_options
).rows == [
[42],
[0],
@@ -187,22 +243,27 @@ defmodule Ch.JSONTest do
]
assert_raise Ch.Error, ~r/Conversion between numeric types and UUID is not supported/, fn ->
- Ch.query!(conn, "SELECT json.a.g::UUID AS float FROM json_test;")
+ Ch.query!(conn, "SELECT json.a.g::UUID AS float FROM json_test;", [], query_options)
end
end
# https://clickhouse.com/docs/sql-reference/data-types/newjson#reading-json-sub-objects-as-sub-columns
- test "reading json subobjects as subcolumns", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE json_test (json JSON) ENGINE = Memory;")
+ test "reading json subobjects as subcolumns", %{conn: conn, query_options: query_options} do
+ Ch.query!(conn, "CREATE TABLE json_test (json JSON) ENGINE = Memory;", [], query_options)
- Ch.query!(conn, """
- INSERT INTO json_test VALUES
- ('{"a" : {"b" : {"c" : 42, "g" : 42.42}}, "c" : [1, 2, 3], "d" : {"e" : {"f" : {"g" : "Hello, World", "h" : [1, 2, 3]}}}}'),
- ('{"f" : "Hello, World!", "d" : {"e" : {"f" : {"h" : [4, 5, 6]}}}}'),
- ('{"a" : {"b" : {"c" : 43, "e" : 10, "g" : 43.43}}, "c" : [4, 5, 6]}');
- """)
+ Ch.query!(
+ conn,
+ """
+ INSERT INTO json_test VALUES
+ ('{"a" : {"b" : {"c" : 42, "g" : 42.42}}, "c" : [1, 2, 3], "d" : {"e" : {"f" : {"g" : "Hello, World", "h" : [1, 2, 3]}}}}'),
+ ('{"f" : "Hello, World!", "d" : {"e" : {"f" : {"h" : [4, 5, 6]}}}}'),
+ ('{"a" : {"b" : {"c" : 43, "e" : 10, "g" : 43.43}}, "c" : [4, 5, 6]}');
+ """,
+ [],
+ query_options
+ )
- assert Ch.query!(conn, "SELECT json FROM json_test;").rows == [
+ assert Ch.query!(conn, "SELECT json FROM json_test;", [], query_options).rows == [
[
%{
"a" => %{"b" => %{"c" => 42, "g" => 42.42}},
@@ -219,26 +280,32 @@ defmodule Ch.JSONTest do
]
]
- assert Ch.query!(conn, "SELECT json.^a.b, json.^d.e.f FROM json_test;").rows == [
- [%{"c" => 42, "g" => 42.42}, %{"g" => "Hello, World", "h" => [1, 2, 3]}],
- [%{}, %{"h" => [4, 5, 6]}],
- [%{"c" => 43, "e" => 10, "g" => 43.43}, %{}]
- ]
+ assert Ch.query!(conn, "SELECT json.^a.b, json.^d.e.f FROM json_test;", [], query_options).rows ==
+ [
+ [%{"c" => 42, "g" => 42.42}, %{"g" => "Hello, World", "h" => [1, 2, 3]}],
+ [%{}, %{"h" => [4, 5, 6]}],
+ [%{"c" => 43, "e" => 10, "g" => 43.43}, %{}]
+ ]
end
# TODO
# https://clickhouse.com/docs/sql-reference/data-types/newjson#handling-arrays-of-json-objects
- test "handling arrays of json objects", %{conn: conn} do
- Ch.query!(conn, "CREATE TABLE json_test (json JSON) ENGINE = Memory;")
+ test "handling arrays of json objects", %{conn: conn, query_options: query_options} do
+ Ch.query!(conn, "CREATE TABLE json_test (json JSON) ENGINE = Memory;", [], query_options)
- Ch.query!(conn, """
- INSERT INTO json_test VALUES
- ('{"a" : {"b" : [{"c" : 42, "d" : "Hello", "f" : [[{"g" : 42.42}]], "k" : {"j" : 1000}}, {"c" : 43}, {"e" : [1, 2, 3], "d" : "My", "f" : [[{"g" : 43.43, "h" : "2020-01-01"}]], "k" : {"j" : 2000}}]}}'),
- ('{"a" : {"b" : [1, 2, 3]}}'),
- ('{"a" : {"b" : [{"c" : 44, "f" : [[{"h" : "2020-01-02"}]]}, {"e" : [4, 5, 6], "d" : "World", "f" : [[{"g" : 44.44}]], "k" : {"j" : 3000}}]}}');
- """)
+ Ch.query!(
+ conn,
+ """
+ INSERT INTO json_test VALUES
+ ('{"a" : {"b" : [{"c" : 42, "d" : "Hello", "f" : [[{"g" : 42.42}]], "k" : {"j" : 1000}}, {"c" : 43}, {"e" : [1, 2, 3], "d" : "My", "f" : [[{"g" : 43.43, "h" : "2020-01-01"}]], "k" : {"j" : 2000}}]}}'),
+ ('{"a" : {"b" : [1, 2, 3]}}'),
+ ('{"a" : {"b" : [{"c" : 44, "f" : [[{"h" : "2020-01-02"}]]}, {"e" : [4, 5, 6], "d" : "World", "f" : [[{"g" : 44.44}]], "k" : {"j" : 3000}}]}}');
+ """,
+ [],
+ query_options
+ )
- assert Ch.query!(conn, "SELECT json FROM json_test;").rows == [
+ assert Ch.query!(conn, "SELECT json FROM json_test;", [], query_options).rows == [
[
%{
"a" => %{
@@ -280,13 +347,15 @@ defmodule Ch.JSONTest do
# TODO
assert_raise ArgumentError, "unsupported dynamic type JSON", fn ->
- Ch.query!(conn, "SELECT json.a.b, dynamicType(json.a.b) FROM json_test;")
+ Ch.query!(conn, "SELECT json.a.b, dynamicType(json.a.b) FROM json_test;", [], query_options)
end
assert_raise ArgumentError, "unsupported dynamic type JSON", fn ->
Ch.query!(
conn,
- "SELECT json.a.b.:`Array(JSON)`.c, json.a.b.:`Array(JSON)`.f, json.a.b.:`Array(JSON)`.d FROM json_test;"
+ "SELECT json.a.b.:`Array(JSON)`.c, json.a.b.:`Array(JSON)`.f, json.a.b.:`Array(JSON)`.d FROM json_test;",
+ [],
+ query_options
)
end
end
diff --git a/test/ch/query_string_test.exs b/test/ch/query_string_test.exs
index 6dcd829..398ab3e 100644
--- a/test/ch/query_string_test.exs
+++ b/test/ch/query_string_test.exs
@@ -1,5 +1,11 @@
defmodule Ch.QueryStringTest do
- use ExUnit.Case, async: true
+ use ExUnit.Case,
+ async: true,
+ parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
+
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
setup do
{:ok, conn: start_supervised!(Ch)}
@@ -8,16 +14,21 @@ defmodule Ch.QueryStringTest do
# For more info see
# https://clickhouse.com/docs/en/interfaces/http#tabs-in-url-parameters
# "escaped" format is the same as https://clickhouse.com/docs/en/interfaces/formats#tabseparated-data-formatting
- test "binaries are escaped properly", %{conn: conn} do
+ test "binaries are escaped properly", %{conn: conn, query_options: query_options} do
for s <- ["\t", "\n", "\\", "'", "\b", "\f", "\r", "\0"] do
- assert Ch.query!(conn, "select {s:String}", %{"s" => s}).rows == [[s]]
+ assert Ch.query!(conn, "select {s:String}", %{"s" => s}, query_options).rows == [[s]]
end
# example from https://clickhouse.com/docs/en/interfaces/http#tabs-in-url-parameters
- assert Ch.query!(conn, "select splitByChar('\t', 'abc\t123')").rows ==
+ assert Ch.query!(conn, "select splitByChar('\t', 'abc\t123')", [], query_options).rows ==
[[["abc", "123"]]]
- assert Ch.query!(conn, "select splitByChar('\t', {arg1:String})", %{"arg1" => "abc\t123"}).rows ==
+ assert Ch.query!(
+ conn,
+ "select splitByChar('\t', {arg1:String})",
+ %{"arg1" => "abc\t123"},
+ query_options
+ ).rows ==
[[["abc", "123"]]]
end
end
diff --git a/test/ch/query_test.exs b/test/ch/query_test.exs
index 56fef45..056cb0c 100644
--- a/test/ch/query_test.exs
+++ b/test/ch/query_test.exs
@@ -1,7 +1,14 @@
defmodule Ch.QueryTest do
- use ExUnit.Case, async: true
+ use ExUnit.Case,
+ async: true,
+ parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
+
alias Ch.Query
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+
test "to_string" do
query = Query.build(["select ", 1 + ?0, ?+, 2 + ?0])
assert to_string(query) == "select 1+2"
@@ -49,51 +56,59 @@ defmodule Ch.QueryTest do
{:ok, conn: start_supervised!({Ch, database: Ch.Test.database()})}
end
- test "iodata", %{conn: conn} do
- assert [[123]] = Ch.query!(conn, ["S", ?E, ["LEC" | "T"], " ", ~c"123"]).rows
+ test "iodata", %{conn: conn, query_options: query_options} do
+ assert [[123]] =
+ Ch.query!(conn, ["S", ?E, ["LEC" | "T"], " ", ~c"123"], [], query_options).rows
end
- test "decode basic types", %{conn: conn} do
- assert [[nil]] = Ch.query!(conn, "SELECT NULL").rows
- assert [[true, false]] = Ch.query!(conn, "SELECT true, false").rows
- assert [["e"]] = Ch.query!(conn, "SELECT 'e'::char").rows
- assert [["ẽ"]] = Ch.query!(conn, "SELECT 'ẽ'::char").rows
- assert [[42]] = Ch.query!(conn, "SELECT 42").rows
- assert [[42.0]] = Ch.query!(conn, "SELECT 42::float").rows
- assert [[42.0]] = Ch.query!(conn, "SELECT 42.0").rows
+ test "decode basic types", %{conn: conn, query_options: query_options} do
+ assert [[nil]] = Ch.query!(conn, "SELECT NULL", [], query_options).rows
+ assert [[true, false]] = Ch.query!(conn, "SELECT true, false", [], query_options).rows
+ assert [["e"]] = Ch.query!(conn, "SELECT 'e'::char", [], query_options).rows
+ assert [["ẽ"]] = Ch.query!(conn, "SELECT 'ẽ'::char", [], query_options).rows
+ assert [[42]] = Ch.query!(conn, "SELECT 42", [], query_options).rows
+ assert [[42.0]] = Ch.query!(conn, "SELECT 42::float", [], query_options).rows
+ assert [[42.0]] = Ch.query!(conn, "SELECT 42.0", [], query_options).rows
# TODO [[:NaN]] ?
- assert [[nil]] = Ch.query!(conn, "SELECT 'NaN'::float").rows
+ assert [[nil]] = Ch.query!(conn, "SELECT 'NaN'::float", [], query_options).rows
# TODO [[:int]] ?
- assert [[nil]] = Ch.query!(conn, "SELECT 'inf'::float").rows
+ assert [[nil]] = Ch.query!(conn, "SELECT 'inf'::float", [], query_options).rows
# TODO [[:"-inf"]] ?
- assert [[nil]] = Ch.query!(conn, "SELECT '-inf'::float").rows
- assert [["ẽric"]] = Ch.query!(conn, "SELECT 'ẽric'").rows
- assert [["ẽric"]] = Ch.query!(conn, "SELECT 'ẽric'::varchar").rows
+ assert [[nil]] = Ch.query!(conn, "SELECT '-inf'::float", [], query_options).rows
+ assert [["ẽric"]] = Ch.query!(conn, "SELECT 'ẽric'", [], query_options).rows
+ assert [["ẽric"]] = Ch.query!(conn, "SELECT 'ẽric'::varchar", [], query_options).rows
# TODO
# assert [[<<1, 2, 3>>]] = Ch.query!(conn, "SELECT '\\001\\002\\003'::bytea").rows
end
- test "decode numeric", %{conn: conn} do
- assert [[Decimal.new("42.0000000000")]] == Ch.query!(conn, "SELECT 42::numeric(10,10)").rows
+ test "decode numeric", %{conn: conn, query_options: query_options} do
+ assert [[Decimal.new("42.0000000000")]] ==
+ Ch.query!(conn, "SELECT 42::numeric(10,10)", [], query_options).rows
end
@tag skip: true
- test "decode json/jsonb", %{conn: conn} do
+ test "decode json/jsonb", %{conn: conn, query_options: query_options} do
assert_raise ArgumentError, "Object('json') type is not supported", fn ->
- assert [[%{"foo" => 42}]] == Ch.query!(conn, "SELECT '{\"foo\": 42}'::json").rows
+ assert [[%{"foo" => 42}]] ==
+ Ch.query!(conn, "SELECT '{\"foo\": 42}'::json", [], query_options).rows
end
end
- test "decode uuid", %{conn: conn} do
+ test "decode uuid", %{conn: conn, query_options: query_options} do
uuid = <<160, 238, 188, 153, 156, 11, 78, 248, 187, 109, 107, 185, 189, 56, 10, 17>>
assert [[^uuid]] =
- Ch.query!(conn, "SELECT 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::UUID").rows
+ Ch.query!(
+ conn,
+ "SELECT 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::UUID",
+ [],
+ query_options
+ ).rows
end
# https://clickhouse.com/docs/sql-reference/data-types/time
@tag :time
- test "decode time", %{conn: conn} do
+ test "decode time", %{conn: conn, query_options: query_options} do
settings = [enable_time_time64_type: 1]
times = [
@@ -105,10 +120,20 @@ defmodule Ch.QueryTest do
for time <- times do
%{value: value, expected: expected} = time
- assert Ch.query!(conn, "SELECT '#{value}'::time", [], settings: settings).rows ==
+ assert Ch.query!(
+ conn,
+ "SELECT '#{value}'::time",
+ [],
+ Keyword.merge(query_options, settings: settings)
+ ).rows ==
[[expected]]
- assert Ch.query!(conn, "SELECT {time:Time}", %{"time" => expected}, settings: settings).rows ==
+ assert Ch.query!(
+ conn,
+ "SELECT {time:Time}",
+ %{"time" => expected},
+ Keyword.merge(query_options, settings: settings)
+ ).rows ==
[[expected]]
end
@@ -118,29 +143,53 @@ defmodule Ch.QueryTest do
assert_raise ArgumentError,
"ClickHouse Time value -1.0 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
- fn -> Ch.query!(conn, "SELECT '-00:00:01'::time", [], settings: settings) end
+ fn ->
+ Ch.query!(
+ conn,
+ "SELECT '-00:00:01'::time",
+ [],
+ Keyword.merge(query_options, settings: settings)
+ )
+ end
assert_raise ArgumentError,
"ClickHouse Time value 3599999.0 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
- fn -> Ch.query!(conn, "SELECT '999:59:59'::time", [], settings: settings) end
+ fn ->
+ Ch.query!(
+ conn,
+ "SELECT '999:59:59'::time",
+ [],
+ Keyword.merge(query_options, settings: settings)
+ )
+ end
assert_raise ArgumentError,
"ClickHouse Time value -3599999.0 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
- fn -> Ch.query!(conn, "SELECT '-999:59:59'::time", [], settings: settings) end
+ fn ->
+ Ch.query!(
+ conn,
+ "SELECT '-999:59:59'::time",
+ [],
+ Keyword.merge(query_options, settings: settings)
+ )
+ end
# ** (Ch.Error) Code: 457. DB::Exception: Value 12:34:56.123456 cannot be parsed as Time for query parameter 'time'
# because it isn't parsed completely: only 8 of 15 bytes was parsed: 12:34:56. (BAD_QUERY_PARAMETER)
# (version 25.6.3.116 (official build))
assert_raise Ch.Error, ~r/only 8 of 15 bytes was parsed/, fn ->
- Ch.query!(conn, "SELECT {time:Time}", %{"time" => ~T[12:34:56.123456]},
- settings: settings
+ Ch.query!(
+ conn,
+ "SELECT {time:Time}",
+ %{"time" => ~T[12:34:56.123456]},
+ Keyword.merge(query_options, settings: settings)
)
end
end
# https://clickhouse.com/docs/sql-reference/data-types/time64
@tag :time
- test "decode time64", %{conn: conn} do
+ test "decode time64", %{conn: conn, query_options: query_options} do
settings = [enable_time_time64_type: 1]
times = [
@@ -175,14 +224,19 @@ defmodule Ch.QueryTest do
for time <- times do
%{value: value, precision: precision, expected: expected} = time
- assert Ch.query!(conn, "SELECT '#{value}'::time64(#{precision})", [], settings: settings).rows ==
+ assert Ch.query!(
+ conn,
+ "SELECT '#{value}'::time64(#{precision})",
+ [],
+ Keyword.merge(query_options, settings: settings)
+ ).rows ==
[[expected]]
assert Ch.query!(
conn,
"SELECT {time:time64(#{precision})}",
%{"time" => expected},
- settings: settings
+ Keyword.merge(query_options, settings: settings)
).rows ==
[[expected]]
end
@@ -194,79 +248,110 @@ defmodule Ch.QueryTest do
assert_raise ArgumentError,
"ClickHouse Time value -1.0 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
fn ->
- Ch.query!(conn, "SELECT '-00:00:01.000'::time64(6)", [], settings: settings)
+ Ch.query!(
+ conn,
+ "SELECT '-00:00:01.000'::time64(6)",
+ [],
+ Keyword.merge(query_options, settings: settings)
+ )
end
assert_raise ArgumentError,
"ClickHouse Time value 3599999.999999 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
fn ->
- Ch.query!(conn, "SELECT '999:59:59.999999999'::time64(6)", [],
- settings: settings
+ Ch.query!(
+ conn,
+ "SELECT '999:59:59.999999999'::time64(6)",
+ [],
+ Keyword.merge(query_options, settings: settings)
)
end
assert_raise ArgumentError,
"ClickHouse Time value -3599999.999999 (seconds) is out of Elixir's Time range (00:00:00.000000 - 23:59:59.999999)",
fn ->
- Ch.query!(conn, "SELECT '-999:59:59.999999999'::time64(6)", [],
- settings: settings
+ Ch.query!(
+ conn,
+ "SELECT '-999:59:59.999999999'::time64(6)",
+ [],
+ Keyword.merge(query_options, settings: settings)
)
end
end
- test "decode arrays", %{conn: conn} do
- assert [[[]]] = Ch.query!(conn, "SELECT []").rows
- assert [[[1]]] = Ch.query!(conn, "SELECT [1]").rows
- assert [[[1, 2]]] = Ch.query!(conn, "SELECT [1,2]").rows
- assert [[[[0], [1]]]] = Ch.query!(conn, "SELECT [[0],[1]]").rows
- assert [[[[0]]]] = Ch.query!(conn, "SELECT [[0]]").rows
+ test "decode arrays", %{conn: conn, query_options: query_options} do
+ assert [[[]]] = Ch.query!(conn, "SELECT []", [], query_options).rows
+ assert [[[1]]] = Ch.query!(conn, "SELECT [1]", [], query_options).rows
+ assert [[[1, 2]]] = Ch.query!(conn, "SELECT [1,2]", [], query_options).rows
+ assert [[[[0], [1]]]] = Ch.query!(conn, "SELECT [[0],[1]]", [], query_options).rows
+ assert [[[[0]]]] = Ch.query!(conn, "SELECT [[0]]", [], query_options).rows
end
- test "decode tuples", %{conn: conn} do
- assert [[{"Hello", 123}]] = Ch.query!(conn, "select ('Hello', 123)").rows
- assert [[{"Hello", 123}]] = Ch.query!(conn, "select ('Hello' as a, 123 as b)").rows
- assert [[{"Hello", 123}]] = Ch.query!(conn, "select ('Hello' as a_, 123 as b)").rows
+ test "decode tuples", %{conn: conn, query_options: query_options} do
+ assert [[{"Hello", 123}]] = Ch.query!(conn, "select ('Hello', 123)", [], query_options).rows
+
+ assert [[{"Hello", 123}]] =
+ Ch.query!(conn, "select ('Hello' as a, 123 as b)", [], query_options).rows
+
+ assert [[{"Hello", 123}]] =
+ Ch.query!(conn, "select ('Hello' as a_, 123 as b)", [], query_options).rows
+
# TODO
- # assert [[{"Hello", 123}]] = Ch.query!(conn, "select ('Hello' as a$, 123 as b)").rows
+ # assert [[{"Hello", 123}]] = Ch.query!(conn, "select ('Hello' as a$, 123 as b)", [], query_options).rows
end
- test "decode network types", %{conn: conn} do
- assert [[{127, 0, 0, 1} = ipv4]] = Ch.query!(conn, "SELECT '127.0.0.1'::inet4").rows
+ test "decode network types", %{conn: conn, query_options: query_options} do
+ assert [[{127, 0, 0, 1} = ipv4]] =
+ Ch.query!(conn, "SELECT '127.0.0.1'::inet4", [], query_options).rows
+
assert :inet.ntoa(ipv4) == ~c"127.0.0.1"
- assert [[{0, 0, 0, 0, 0, 0, 0, 1} = ipv6]] = Ch.query!(conn, "SELECT '::1'::inet6").rows
+ assert [[{0, 0, 0, 0, 0, 0, 0, 1} = ipv6]] =
+ Ch.query!(conn, "SELECT '::1'::inet6", [], query_options).rows
+
assert :inet.ntoa(ipv6) == ~c"::1"
- assert [[ipv6]] = Ch.query!(conn, "SELECT '2001:44c8:129:2632:33:0:252:2'::inet6").rows
+ assert [[ipv6]] =
+ Ch.query!(conn, "SELECT '2001:44c8:129:2632:33:0:252:2'::inet6", [], query_options).rows
+
assert :inet.ntoa(ipv6) == ~c"2001:44c8:129:2632:33:0:252:2"
end
- test "decoded binaries copy behaviour", %{conn: conn} do
+ test "decoded binaries copy behaviour", %{conn: conn, query_options: query_options} do
text = "hello world"
- assert [[bin]] = Ch.query!(conn, "SELECT {$0:String}", [text]).rows
+ assert [[bin]] = Ch.query!(conn, "SELECT {$0:String}", [text], query_options).rows
assert :binary.referenced_byte_size(bin) == :binary.referenced_byte_size("hello world")
# For OTP 20+ refc binaries up to 64 bytes might be copied during a GC
text = String.duplicate("hello world", 6)
- assert [[bin]] = Ch.query!(conn, "SELECT {$0:String}", [text]).rows
+ assert [[bin]] = Ch.query!(conn, "SELECT {$0:String}", [text], query_options).rows
assert :binary.referenced_byte_size(bin) == byte_size(text)
end
- test "encode basic types", %{conn: conn} do
+ test "encode basic types", %{conn: conn, query_options: query_options} do
# TODO
# assert [[nil, nil]] = query("SELECT $1::text, $2::int", [nil, nil])
- assert [[true, false]] = Ch.query!(conn, "SELECT {$0:bool}, {$1:Bool}", [true, false]).rows
- assert [["ẽ"]] = Ch.query!(conn, "SELECT {$0:char}", ["ẽ"]).rows
- assert [[42]] = Ch.query!(conn, "SELECT {$0:int}", [42]).rows
- assert [[42.0, 43.0]] = Ch.query!(conn, "SELECT {$0:float}, {$1:float}", [42, 43.0]).rows
- assert [[nil, nil]] = Ch.query!(conn, "SELECT {$0:float}, {$1:float}", ["NaN", "nan"]).rows
- assert [[nil]] = Ch.query!(conn, "SELECT {$0:float}", ["inf"]).rows
- assert [[nil]] = Ch.query!(conn, "SELECT {$0:float}", ["-inf"]).rows
- assert [["ẽric"]] = Ch.query!(conn, "SELECT {$0:varchar}", ["ẽric"]).rows
- assert [[<<1, 2, 3>>]] = Ch.query!(conn, "SELECT {$0:bytea}", [<<1, 2, 3>>]).rows
+ assert [[true, false]] =
+ Ch.query!(conn, "SELECT {$0:bool}, {$1:Bool}", [true, false], query_options).rows
+
+ assert [["ẽ"]] = Ch.query!(conn, "SELECT {$0:char}", ["ẽ"], query_options).rows
+ assert [[42]] = Ch.query!(conn, "SELECT {$0:int}", [42], query_options).rows
+
+ assert [[42.0, 43.0]] =
+ Ch.query!(conn, "SELECT {$0:float}, {$1:float}", [42, 43.0], query_options).rows
+
+ assert [[nil, nil]] =
+ Ch.query!(conn, "SELECT {$0:float}, {$1:float}", ["NaN", "nan"], query_options).rows
+
+ assert [[nil]] = Ch.query!(conn, "SELECT {$0:float}", ["inf"], query_options).rows
+ assert [[nil]] = Ch.query!(conn, "SELECT {$0:float}", ["-inf"], query_options).rows
+ assert [["ẽric"]] = Ch.query!(conn, "SELECT {$0:varchar}", ["ẽric"], query_options).rows
+
+ assert [[<<1, 2, 3>>]] =
+ Ch.query!(conn, "SELECT {$0:bytea}", [<<1, 2, 3>>], query_options).rows
end
- test "encode numeric", %{conn: conn} do
+ test "encode numeric", %{conn: conn, query_options: query_options} do
nums = [
{"42", "numeric(2,0)"},
{"0.4242", "numeric(4,4)"},
@@ -290,114 +375,141 @@ defmodule Ch.QueryTest do
Enum.each(nums, fn {num, type} ->
dec = Decimal.new(num)
- assert [[dec]] == Ch.query!(conn, "SELECT {$0:#{type}}", [dec]).rows
+ assert [[dec]] == Ch.query!(conn, "SELECT {$0:#{type}}", [dec], query_options).rows
end)
end
- test "encode integers and floats as numeric", %{conn: conn} do
+ test "encode integers and floats as numeric", %{conn: conn, query_options: query_options} do
dec = Decimal.new(1)
- assert [[dec]] == Ch.query!(conn, "SELECT {$0:numeric(1,0)}", [1]).rows
+ assert [[dec]] == Ch.query!(conn, "SELECT {$0:numeric(1,0)}", [1], query_options).rows
dec = Decimal.from_float(1.0)
- assert [[dec]] == Ch.query!(conn, "SELECT {$0:numeric(2,1)}", [1.0]).rows
+ assert [[dec]] == Ch.query!(conn, "SELECT {$0:numeric(2,1)}", [1.0], query_options).rows
end
@tag skip: true
- test "encode json/jsonb", %{conn: conn} do
+ test "encode json/jsonb", %{conn: conn, query_options: query_options} do
json = %{"foo" => 42}
- assert [[json]] == Ch.query!(conn, "SELECT {$0::json}", [json]).rows
+ assert [[json]] == Ch.query!(conn, "SELECT {$0::json}", [json], query_options).rows
end
- test "encode uuid", %{conn: conn} do
+ test "encode uuid", %{conn: conn, query_options: query_options} do
# TODO
uuid = <<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15>>
uuid_hex = "00010203-0405-0607-0809-0a0b0c0d0e0f"
- assert [[^uuid]] = Ch.query!(conn, "SELECT {$0:UUID}", [uuid_hex]).rows
+ assert [[^uuid]] = Ch.query!(conn, "SELECT {$0:UUID}", [uuid_hex], query_options).rows
end
- test "encode arrays", %{conn: conn} do
- assert [[[]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[]]).rows
- assert [[[1]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[1]]).rows
- assert [[[1, 2]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[1, 2]]).rows
+ test "encode arrays", %{conn: conn, query_options: query_options} do
+ assert [[[]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[]], query_options).rows
+ assert [[[1]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[1]], query_options).rows
- assert [[["1"]]] = Ch.query!(conn, "SELECT {$0:Array(String)}", [["1"]]).rows
- assert [[[true]]] = Ch.query!(conn, "SELECT {$0:Array(Bool)}", [[true]]).rows
+ assert [[[1, 2]]] =
+ Ch.query!(conn, "SELECT {$0:Array(integer)}", [[1, 2]], query_options).rows
+
+ assert [[["1"]]] = Ch.query!(conn, "SELECT {$0:Array(String)}", [["1"]], query_options).rows
+ assert [[[true]]] = Ch.query!(conn, "SELECT {$0:Array(Bool)}", [[true]], query_options).rows
assert [[[~D[2023-01-01]]]] =
- Ch.query!(conn, "SELECT {$0:Array(Date)}", [[~D[2023-01-01]]]).rows
+ Ch.query!(conn, "SELECT {$0:Array(Date)}", [[~D[2023-01-01]]], query_options).rows
assert [[[Ch.Test.to_clickhouse_naive(conn, ~N[2023-01-01 12:00:00])]]] ==
- Ch.query!(conn, "SELECT {$0:Array(DateTime)}", [[~N[2023-01-01 12:00:00]]]).rows
+ Ch.query!(
+ conn,
+ "SELECT {$0:Array(DateTime)}",
+ [[~N[2023-01-01 12:00:00]]],
+ query_options
+ ).rows
assert [[[~U[2023-01-01 12:00:00Z]]]] ==
- Ch.query!(conn, "SELECT {$0:Array(DateTime('UTC'))}", [[~N[2023-01-01 12:00:00]]]).rows
+ Ch.query!(
+ conn,
+ "SELECT {$0:Array(DateTime('UTC'))}",
+ [[~N[2023-01-01 12:00:00]]],
+ query_options
+ ).rows
assert [[[~N[2023-01-01 12:00:00]]]] ==
- Ch.query!(conn, "SELECT {$0:Array(DateTime)}", [[~U[2023-01-01 12:00:00Z]]]).rows
+ Ch.query!(
+ conn,
+ "SELECT {$0:Array(DateTime)}",
+ [[~U[2023-01-01 12:00:00Z]]],
+ query_options
+ ).rows
assert [[[~U[2023-01-01 12:00:00Z]]]] ==
- Ch.query!(conn, "SELECT {$0:Array(DateTime('UTC'))}", [[~U[2023-01-01 12:00:00Z]]]).rows
+ Ch.query!(
+ conn,
+ "SELECT {$0:Array(DateTime('UTC'))}",
+ [[~U[2023-01-01 12:00:00Z]]],
+ query_options
+ ).rows
assert [[[[0], [1]]]] =
- Ch.query!(conn, "SELECT {$0:Array(Array(integer))}", [[[0], [1]]]).rows
+ Ch.query!(conn, "SELECT {$0:Array(Array(integer))}", [[[0], [1]]], query_options).rows
- assert [[[[0]]]] = Ch.query!(conn, "SELECT {$0:Array(Array(integer))}", [[[0]]]).rows
- # assert [[[1, nil, 3]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[1, nil, 3]]).rows
+ assert [[[[0]]]] =
+ Ch.query!(conn, "SELECT {$0:Array(Array(integer))}", [[[0]]], query_options).rows
+
+ # assert [[[1, nil, 3]]] = Ch.query!(conn, "SELECT {$0:Array(integer)}", [[1, nil, 3]], query_options).rows
end
- test "encode network types", %{conn: conn} do
+ test "encode network types", %{conn: conn, query_options: query_options} do
# TODO, or wrap in custom struct like in postgrex
# assert [["127.0.0.1/32"]] =
- # Ch.query!(conn, "SELECT {$0:inet4}::text", [{127, 0, 0, 1}]).rows
+ # Ch.query!(conn, "SELECT {$0:inet4}::text", [{127, 0, 0, 1}], query_options).rows
- assert [[{127, 0, 0, 1}]] = Ch.query!(conn, "SELECT {$0:text}::inet4", ["127.0.0.1"]).rows
+ assert [[{127, 0, 0, 1}]] =
+ Ch.query!(conn, "SELECT {$0:text}::inet4", ["127.0.0.1"], query_options).rows
assert [[{0, 0, 0, 0, 0, 0, 0, 1}]] =
- Ch.query!(conn, "SELECT {$0:text}::inet6", ["::1"]).rows
+ Ch.query!(conn, "SELECT {$0:text}::inet6", ["::1"], query_options).rows
end
- test "result struct", %{conn: conn} do
- assert {:ok, res} = Ch.query(conn, "SELECT 123 AS a, 456 AS b")
+ test "result struct", %{conn: conn, query_options: query_options} do
+ assert {:ok, res} = Ch.query(conn, "SELECT 123 AS a, 456 AS b", [], query_options)
assert %Ch.Result{} = res
assert res.command == :select
assert res.columns == ["a", "b"]
assert res.num_rows == 1
end
- test "empty result struct", %{conn: conn} do
- assert %Ch.Result{} = res = Ch.query!(conn, "select number, 'a' as b from numbers(0)")
+ test "empty result struct", %{conn: conn, query_options: query_options} do
+ assert %Ch.Result{} =
+ res = Ch.query!(conn, "select number, 'a' as b from numbers(0)", [], query_options)
+
assert res.command == :select
assert res.columns == ["number", "b"]
assert res.rows == []
assert res.num_rows == 0
end
- test "error struct", %{conn: conn} do
- assert {:error, %Ch.Error{}} = Ch.query(conn, "SELECT 123 + 'a'")
+ test "error struct", %{conn: conn, query_options: query_options} do
+ assert {:error, %Ch.Error{}} = Ch.query(conn, "SELECT 123 + 'a'", [], query_options)
end
- test "error code", %{conn: conn} do
- assert {:error, %Ch.Error{code: 62}} = Ch.query(conn, "wat")
+ test "error code", %{conn: conn, query_options: query_options} do
+ assert {:error, %Ch.Error{code: 62}} = Ch.query(conn, "wat", [], query_options)
end
- test "connection works after failure in execute", %{conn: conn} do
- assert {:error, %Ch.Error{}} = Ch.query(conn, "wat")
- assert [[42]] = Ch.query!(conn, "SELECT 42").rows
+ test "connection works after failure in execute", %{conn: conn, query_options: query_options} do
+ assert {:error, %Ch.Error{}} = Ch.query(conn, "wat", [], query_options)
+ assert [[42]] = Ch.query!(conn, "SELECT 42", [], query_options).rows
end
- test "async test", %{conn: conn} do
+ test "async test", %{conn: conn, query_options: query_options} do
self_pid = self()
Enum.each(1..10, fn _ ->
spawn_link(fn ->
- send(self_pid, Ch.query!(conn, "SELECT sleep(0.05)").rows)
+ send(self_pid, Ch.query!(conn, "SELECT sleep(0.05)", [], query_options).rows)
end)
end)
- assert [[42]] = Ch.query!(conn, "SELECT 42").rows
+ assert [[42]] = Ch.query!(conn, "SELECT 42", [], query_options).rows
Enum.each(1..10, fn _ ->
- assert_receive [[0]]
+ assert_receive [[0]], :timer.seconds(1)
end)
end
@@ -406,13 +518,13 @@ defmodule Ch.QueryTest do
end
end
- test "query before and after idle ping" do
+ test "query before and after idle ping", %{query_options: query_options} do
opts = [backoff_type: :stop, idle_interval: 1]
{:ok, pid} = Ch.start_link(opts)
- assert {:ok, _} = Ch.query(pid, "SELECT 42")
+ assert {:ok, _} = Ch.query(pid, "SELECT 42", [], query_options)
:timer.sleep(20)
- assert {:ok, _} = Ch.query(pid, "SELECT 42")
+ assert {:ok, _} = Ch.query(pid, "SELECT 42", [], query_options)
:timer.sleep(20)
- assert {:ok, _} = Ch.query(pid, "SELECT 42")
+ assert {:ok, _} = Ch.query(pid, "SELECT 42", [], query_options)
end
end
diff --git a/test/ch/settings_test.exs b/test/ch/settings_test.exs
index eb28489..ea28cf3 100644
--- a/test/ch/settings_test.exs
+++ b/test/ch/settings_test.exs
@@ -1,24 +1,33 @@
defmodule Ch.SettingsTest do
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
- test "can start without settings" do
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+
+ test "can start without settings", %{query_options: query_options} do
assert {:ok, conn} = Ch.start_link()
assert {:ok, %{num_rows: 1, rows: [["async_insert", "Bool", "0"]]}} =
- Ch.query(conn, "show settings like 'async_insert'")
+ Ch.query(conn, "show settings like 'async_insert'", [], query_options)
end
- test "can pass default settings" do
+ test "can pass default settings", %{query_options: query_options} do
assert {:ok, conn} = Ch.start_link(settings: [async_insert: 1])
assert {:ok, %{num_rows: 1, rows: [["async_insert", "Bool", "1"]]}} =
- Ch.query(conn, "show settings like 'async_insert'")
+ Ch.query(conn, "show settings like 'async_insert'", [], query_options)
end
- test "can overwrite default settings with options" do
+ test "can overwrite default settings with options", %{query_options: query_options} do
assert {:ok, conn} = Ch.start_link(settings: [async_insert: 1])
assert {:ok, %{num_rows: 1, rows: [["async_insert", "Bool", "0"]]}} =
- Ch.query(conn, "show settings like 'async_insert'", [], settings: [async_insert: 0])
+ Ch.query(
+ conn,
+ "show settings like 'async_insert'",
+ [],
+ Keyword.merge(query_options, settings: [async_insert: 0])
+ )
end
end
diff --git a/test/ch/stream_test.exs b/test/ch/stream_test.exs
index 2d6e4c6..62be761 100644
--- a/test/ch/stream_test.exs
+++ b/test/ch/stream_test.exs
@@ -1,17 +1,25 @@
defmodule Ch.StreamTest do
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
alias Ch.{Result, RowBinary}
+ setup ctx do
+ {:ok, query_options: ctx[:query_options] || []}
+ end
+
setup do
{:ok, conn: start_supervised!({Ch, database: Ch.Test.database()})}
end
describe "enumerable Ch.stream/4" do
- test "emits %Ch.Result{}", %{conn: conn} do
+ test "emits %Ch.Result{}", %{conn: conn, query_options: query_options} do
results =
DBConnection.run(conn, fn conn ->
conn
- |> Ch.stream("select * from numbers({count:UInt64})", %{"count" => 1_000_000})
+ |> Ch.stream(
+ "select * from numbers({count:UInt64})",
+ %{"count" => 1_000_000},
+ query_options
+ )
|> Enum.into([])
end)
@@ -19,23 +27,27 @@ defmodule Ch.StreamTest do
Enum.to_list(0..999_999)
end
- test "raises on error", %{conn: conn} do
+ test "raises on error", %{conn: conn, query_options: query_options} do
assert_raise Ch.Error,
~r/Code: 62. DB::Exception: Syntax error: failed at position 8/,
fn ->
DBConnection.run(conn, fn conn ->
- conn |> Ch.stream("select ", %{"count" => 1_000_000}) |> Enum.into([])
+ conn
+ |> Ch.stream("select ", %{"count" => 1_000_000}, query_options)
+ |> Enum.into([])
end)
end
end
- test "large strings", %{conn: conn} do
+ test "large strings", %{conn: conn, query_options: query_options} do
results =
DBConnection.run(conn, fn conn ->
conn
- |> Ch.stream("select repeat('abc', 500000) from numbers({count:UInt64})", %{
- "count" => 10
- })
+ |> Ch.stream(
+ "select repeat('abc', 500000) from numbers({count:UInt64})",
+ %{"count" => 10},
+ query_options
+ )
|> Enum.into([])
end)
@@ -47,8 +59,9 @@ defmodule Ch.StreamTest do
end
describe "collectable Ch.stream/4" do
- test "inserts chunks", %{conn: conn} do
+ test "inserts chunks", %{conn: conn, query_options: query_options} do
Ch.query!(conn, "create table collect_stream(i UInt64) engine Memory")
+ on_exit(fn -> Ch.Test.query("DROP TABLE collect_stream") end)
assert %Ch.Result{command: :insert, num_rows: 1_000_000} =
DBConnection.run(conn, fn conn ->
@@ -61,7 +74,7 @@ defmodule Ch.StreamTest do
conn,
"insert into collect_stream(i) format RowBinary",
_params = [],
- encode: false
+ Keyword.merge(query_options, encode: false)
)
)
end)
diff --git a/test/ch/variant_test.exs b/test/ch/variant_test.exs
index 23a2c57..fbd7144 100644
--- a/test/ch/variant_test.exs
+++ b/test/ch/variant_test.exs
@@ -1,5 +1,6 @@
defmodule Ch.VariantTest do
- use ExUnit.Case
+ use ExUnit.Case, parameterize: [%{query_options: []}, %{query_options: [multipart: true]}]
+ import Ch.Test, only: [parameterize_query!: 2, parameterize_query!: 4]
# https://clickhouse.com/docs/sql-reference/data-types/variant
@@ -10,17 +11,26 @@ defmodule Ch.VariantTest do
{:ok, conn: conn}
end
- test "basic", %{conn: conn} do
- assert Ch.query!(conn, "select null::Variant(UInt64, String, Array(UInt64))").rows == [[nil]]
- assert Ch.query!(conn, "select [1]::Variant(UInt64, String, Array(UInt64))").rows == [[[1]]]
- assert Ch.query!(conn, "select 0::Variant(UInt64, String, Array(UInt64))").rows == [[0]]
+ test "basic", ctx do
+ assert parameterize_query!(ctx, "select null::Variant(UInt64, String, Array(UInt64))").rows ==
+ [[nil]]
- assert Ch.query!(conn, "select 'Hello, World!'::Variant(UInt64, String, Array(UInt64))").rows ==
+ assert parameterize_query!(ctx, "select [1]::Variant(UInt64, String, Array(UInt64))").rows ==
+ [[[1]]]
+
+ assert parameterize_query!(ctx, "select 0::Variant(UInt64, String, Array(UInt64))").rows == [
+ [0]
+ ]
+
+ assert parameterize_query!(
+ ctx,
+ "select 'Hello, World!'::Variant(UInt64, String, Array(UInt64))"
+ ).rows ==
[["Hello, World!"]]
end
# https://github.com/plausible/ch/issues/272
- test "ordering internal types", %{conn: conn} do
+ test "ordering internal types", ctx do
test = %{
"'hello'" => "hello",
"-10" => -10,
@@ -30,27 +40,27 @@ defmodule Ch.VariantTest do
}
for {value, expected} <- test do
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"select #{value}::Variant(String, Int32, Bool, Map(String, Nullable(String)))"
).rows == [[expected]]
end
end
- test "with a table", %{conn: conn} do
+ test "with a table", ctx do
# https://clickhouse.com/docs/sql-reference/data-types/variant#creating-variant
- Ch.query!(conn, """
+ parameterize_query!(ctx, """
CREATE TABLE variant_test (v Variant(UInt64, String, Array(UInt64))) ENGINE = Memory;
""")
- on_exit(fn -> Ch.Test.query("DROP TABLE variant_test", [], database: Ch.Test.database()) end)
+ on_exit(fn -> Ch.Test.query("DROP TABLE variant_test") end)
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO variant_test VALUES (NULL), (42), ('Hello, World!'), ([1, 2, 3]);"
)
- assert Ch.query!(conn, "SELECT v FROM variant_test").rows == [
+ assert parameterize_query!(ctx, "SELECT v FROM variant_test").rows == [
[nil],
[42],
["Hello, World!"],
@@ -58,7 +68,10 @@ defmodule Ch.VariantTest do
]
# https://clickhouse.com/docs/sql-reference/data-types/variant#reading-variant-nested-types-as-subcolumns
- assert Ch.query!(conn, "SELECT v, v.String, v.UInt64, v.`Array(UInt64)` FROM variant_test;").rows ==
+ assert parameterize_query!(
+ ctx,
+ "SELECT v, v.String, v.UInt64, v.`Array(UInt64)` FROM variant_test;"
+ ).rows ==
[
[nil, nil, nil, []],
[42, nil, 42, []],
@@ -66,8 +79,8 @@ defmodule Ch.VariantTest do
[[1, 2, 3], nil, nil, [1, 2, 3]]
]
- assert Ch.query!(
- conn,
+ assert parameterize_query!(
+ ctx,
"SELECT v, variantElement(v, 'String'), variantElement(v, 'UInt64'), variantElement(v, 'Array(UInt64)') FROM variant_test;"
).rows == [
[nil, nil, nil, []],
@@ -77,21 +90,21 @@ defmodule Ch.VariantTest do
]
end
- test "rowbinary", %{conn: conn} do
- Ch.query!(conn, """
+ test "rowbinary", ctx do
+ parameterize_query!(ctx, """
CREATE TABLE variant_test (v Variant(UInt64, String, Array(UInt64))) ENGINE = Memory;
""")
- on_exit(fn -> Ch.Test.query("DROP TABLE variant_test", [], database: Ch.Test.database()) end)
+ on_exit(fn -> Ch.Test.query("DROP TABLE variant_test") end)
- Ch.query!(
- conn,
+ parameterize_query!(
+ ctx,
"INSERT INTO variant_test FORMAT RowBinary",
[[nil], [42], ["Hello, World!"], [[1, 2, 3]]],
types: ["Variant(UInt64, String, Array(UInt64))"]
)
- assert Ch.query!(conn, "SELECT v FROM variant_test").rows == [
+ assert parameterize_query!(ctx, "SELECT v FROM variant_test").rows == [
[nil],
[42],
["Hello, World!"],
diff --git a/test/support/test.ex b/test/support/test.ex
index 4200c01..301e9ab 100644
--- a/test/support/test.ex
+++ b/test/support/test.ex
@@ -8,12 +8,40 @@ defmodule Ch.Test do
task =
Task.async(fn ->
{:ok, pid} = Ch.start_link(opts)
+ opts = Keyword.put_new_lazy(opts, :database, &database/0)
Ch.query!(pid, sql, params, opts)
end)
Task.await(task)
end
+ # helper for ExUnit.Case :parameterize
+ def parameterize_query_options(ctx, options \\ []) do
+ if default_options = ctx[:query_options] do
+ Keyword.merge(default_options, options)
+ else
+ options
+ end
+ end
+
+ def parameterize_query(ctx, sql, params \\ [], options \\ []) do
+ Ch.query(
+ ctx.conn,
+ sql,
+ params,
+ parameterize_query_options(ctx, options)
+ )
+ end
+
+ def parameterize_query!(ctx, sql, params \\ [], options \\ []) do
+ Ch.query!(
+ ctx.conn,
+ sql,
+ params,
+ parameterize_query_options(ctx, options)
+ )
+ end
+
# TODO packet: :http?
def intercept_packets(socket, buffer \\ <<>>) do
receive do
diff --git a/test/test_helper.exs b/test/test_helper.exs
index eef00d8..97caedd 100644
--- a/test/test_helper.exs
+++ b/test/test_helper.exs
@@ -21,8 +21,17 @@ Calendar.put_time_zone_database(Tz.TimeZoneDatabase)
default_test_db = System.get_env("CH_DATABASE", "ch_elixir_test")
Application.put_env(:ch, :database, default_test_db)
-Ch.Test.query("DROP DATABASE IF EXISTS {db:Identifier}", %{"db" => default_test_db})
-Ch.Test.query("CREATE DATABASE {db:Identifier}", %{"db" => default_test_db})
+Ch.Test.query(
+ "DROP DATABASE IF EXISTS {db:Identifier}",
+ %{"db" => default_test_db},
+ database: "default"
+)
+
+Ch.Test.query(
+ "CREATE DATABASE {db:Identifier}",
+ %{"db" => default_test_db},
+ database: "default"
+)
%{rows: [[ch_version]]} = Ch.Test.query("SELECT version()")