diff --git a/lib/logflare/sql.ex b/lib/logflare/sql.ex
index cc17144f6..f5c8ded29 100644
--- a/lib/logflare/sql.ex
+++ b/lib/logflare/sql.ex
@@ -917,6 +917,7 @@ defmodule Logflare.Sql do
       cte_from_aliases: cte_from_aliases,
       in_cte_tables_tree: false,
       in_cast: false,
+      in_projection_tree: false,
       from_table_aliases: [],
       from_table_values: [],
       in_binaryop: false
@@ -935,7 +936,11 @@ defmodule Logflare.Sql do
   # convert body.timestamp from unix microsecond to postgres timestamp
   defp convert_keys_to_json_query(
          %{"CompoundIdentifier" => [%{"value" => "timestamp"}]},
-         %{in_cte_tables_tree: in_cte_tables_tree, cte_aliases: cte_aliases} = _data,
+         %{
+           in_cte_tables_tree: in_cte_tables_tree,
+           cte_aliases: cte_aliases,
+           in_projection_tree: false
+         } = _data,
          [
            table,
            "body"
@@ -1197,6 +1202,10 @@ defmodule Logflare.Sql do
     {k, traverse_convert_identifiers(v, Map.put(data, :in_cte_tables_tree, true))}
   end
 
+  defp traverse_convert_identifiers({"projection" = k, v}, data) do
+    {k, traverse_convert_identifiers(v, Map.put(data, :in_projection_tree, true))}
+  end
+
   # handle top level queries
   defp traverse_convert_identifiers(
          {"Query" = k, %{"body" => %{"Select" => %{"from" => [_ | _] = from_list}}} = v},
diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs
index e8c1d6180..7251fe89b 100644
--- a/test/logflare/sql_test.exs
+++ b/test/logflare/sql_test.exs
@@ -718,8 +718,16 @@ defmodule Logflare.SqlTest do
     test "unix microsecond timestamp handling" do
       bq_query = ~s|select t.timestamp as ts from my_table t|
+      pg_query = ~s|select (t.body -> 'timestamp') as ts from my_table t|
+
+      {:ok, translated} = Sql.translate(:bq_sql, :pg_sql, bq_query)
+      assert Sql.Parser.parse("postgres", translated) == Sql.Parser.parse("postgres", pg_query)
+
+      # only convert if not in projection
+      bq_query = ~s|select t.id as id from my_table t where t.timestamp is not null|
+
       pg_query =
-        ~s|select (to_timestamp( (t.body ->> 'timestamp')::bigint / 1000000.0) AT TIME ZONE 'UTC') as ts from my_table t|
+        ~s|select (t.body -> 'id') as id from my_table t where (to_timestamp( (t.body ->> 'timestamp')::bigint / 1000000.0) AT TIME ZONE 'UTC') is not null|
 
       {:ok, translated} = Sql.translate(:bq_sql, :pg_sql, bq_query)
       assert Sql.Parser.parse("postgres", translated) == Sql.Parser.parse("postgres", pg_query)
@@ -742,7 +750,7 @@ defmodule Logflare.SqlTest do
     pg_query = ~s"""
     with edge_logs as (
       select
-        (to_timestamp( (t.body ->> 'timestamp')::bigint / 1000000.0) AT TIME ZONE 'UTC') as timestamp,
+        (t.body -> 'timestamp') as timestamp,
         (t.body -> 'id') as id,
         (t.body -> 'event_message') AS event_message,
         (t.body -> 'metadata') as metadata
diff --git a/test/logflare_web/controllers/endpoints_controller_test.exs b/test/logflare_web/controllers/endpoints_controller_test.exs
index 603cf1e7f..a94a071e8 100644
--- a/test/logflare_web/controllers/endpoints_controller_test.exs
+++ b/test/logflare_web/controllers/endpoints_controller_test.exs
@@ -201,7 +201,12 @@ defmodule LogflareWeb.EndpointsControllerTest do
       |> put_req_header("x-api-key", user.api_key)
       |> get(~p"/endpoints/query/logs.all?#{params}")
 
-      assert [%{"event_message" => "some message"}] = json_response(conn, 200)["result"]
+      assert [%{"event_message" => "some message", "timestamp" => timestamp}] =
+               json_response(conn, 200)["result"]
+
+      # render as unix microsecond
+      assert inspect(timestamp) |> String.length() == 16
+      assert "16" <> _ = inspect(timestamp)
 
       assert conn.halted == false
       # test a logs ui query