feat: refactor part 1
Ziinc committed Nov 19, 2024
1 parent 0dba184 commit fb031fe
Showing 9 changed files with 60 additions and 75 deletions.
3 changes: 3 additions & 0 deletions assets/css/app.scss
@@ -274,6 +274,9 @@ a:focus, button.btn-link:focus {

mark {
  background-color: #5eeb8f;
  &.log-error {
    background-color: #f14216;
  }
}

.example-log-code {
14 changes: 0 additions & 14 deletions lib/logflare/ecto/bigquery/ecto_bq_query_api_udf.ex
@@ -4,20 +4,6 @@ defmodule Logflare.Ecto.BQQueryAPI.UDF do
"""
import Logflare.Ecto.BQQueryAPI, only: [to_bq_interval_token: 1]

defmacro lf_timestamp_trunc(timestamp, interval) do
fragment_string = udf_function("LF_TIMESTAMP_TRUNC(?, ?)")

interval = quoted_interval(interval)

quote do
fragment(
unquote(fragment_string),
unquote(timestamp),
^unquote(quote(do: to_bq_interval_token(unquote(interval))))
)
end
end

defmacro lf_timestamp_sub(date, count, interval) do
fragment_string = udf_function("LF_TIMESTAMP_SUB(?, ?, ?)")

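The deleted lf_timestamp_trunc/2 macro wrapped the dataset-scoped LF_TIMESTAMP_TRUNC UDF in an Ecto fragment; its inline replacement appears in logs_search_queries.ex below. As a rough sketch, not code from this commit and assuming udf_function/1 prepends the `$$__DEFAULT_DATASET__$$` placeholder seen in the old fragment string further down, a call site such as lf_timestamp_trunc(t.timestamp, ^period) expanded to roughly:

  import Ecto.Query
  import Logflare.Ecto.BQQueryAPI, only: [to_bq_interval_token: 1]

  period = :minute

  # Approximate expansion of the deleted macro (hedged sketch):
  truncator =
    dynamic(
      [t],
      fragment(
        "`$$__DEFAULT_DATASET__$$`.LF_TIMESTAMP_TRUNC(?, ?)",
        t.timestamp,
        ^to_bq_interval_token(period)
      )
    )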
4 changes: 3 additions & 1 deletion lib/logflare/logs/logs_search.ex
@@ -79,7 +79,9 @@ defmodule Logflare.Logs.Search do
def search_events(%SO{} = so) do
so = %{so | type: :events} |> put_time_stats()

with %{error: nil} = so <- apply_query_defaults(so),
with %{error: nil} = so <- put_chart_data_shape_id(so),
%{error: nil} = so <- apply_query_defaults(so),
%{error: nil} = so <- apply_halt_conditions(so),
%{error: nil} = so <- apply_local_timestamp_correction(so),
%{error: nil} = so <- apply_timestamp_filter_rules(so),
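Each search step above returns the %SO{} struct and records failures in its :error field, so matching %{error: nil} on every arrow makes the first failing step short-circuit the chain; moving put_chart_data_shape_id/1 to the front also lets apply_query_defaults/1 branch on the shape it assigns (see the next file). A minimal standalone sketch of the same control flow, with hypothetical step names:

  defmodule WithChainSketch do
    # Each step returns the struct; a non-nil :error halts the chain.
    def run(%{error: nil} = op) do
      with %{error: nil} = op <- step_one(op),
           %{error: nil} = op <- step_two(op) do
        {:ok, op}
      else
        %{error: reason} -> {:error, reason}
      end
    end

    defp step_one(op), do: op
    defp step_two(op), do: %{op | error: "halted"}
  end

  WithChainSketch.run(%{error: nil})
  #=> {:error, "halted"}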
2 changes: 1 addition & 1 deletion lib/logflare/logs/lql/bigquery/lql_ecto_helpers.ex
@@ -59,7 +59,7 @@ defmodule Logflare.Lql.EctoHelpers do
)
end
end)
end
end
end

defp split_by_dots(str) do
27 changes: 17 additions & 10 deletions lib/logflare/logs/search/logs_search_operations.ex
@@ -65,13 +65,21 @@ defmodule Logflare.Logs.SearchOperations do
end

@spec apply_query_defaults(SO.t()) :: SO.t()
def apply_query_defaults(%SO{type: :events} = so) do
def apply_query_defaults(%SO{chart_data_shape_id: chart_data_shape_id} = so) do
  query =
    from(so.source.bq_table_id)
    |> select_default_fields(:events)
    |> order_by(desc: :timestamp)
    |> select([t], [t.timestamp, t.id, t.event_message])
    |> case do
      q when chart_data_shape_id == :elixir_logger_levels ->
        q
        |> Lql.EctoHelpers.unnest_and_join_nested_columns(:inner, "metadata")
        |> select_merge([..., m], %{level: m.level})

      q ->
        q
    end
    |> order_by([t], desc: t.timestamp)
    |> limit(@default_limit)

  %{so | query: query}
end
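The refactored clause pipes the query straight into case and adds the level column only for the :elixir_logger_levels shape; the guard compares chart_data_shape_id, a variable bound in the function head, which Elixir permits in case guards. The pattern in isolation, with hypothetical data not from the commit:

  shape = :elixir_logger_levels

  steps =
    [:select_defaults]
    |> case do
      acc when shape == :elixir_logger_levels -> acc ++ [:join_metadata, :select_level]
      acc -> acc
    end

  steps
  #=> [:select_defaults, :join_metadata, :select_level]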

@@ -132,21 +140,20 @@
SourceSchemas.get_source_schema_by(source_id: so.source.id)
|> Map.get(:schema_flat_map)

[%{path: path}] = so.chart_rules
path_is_timestamp? = path == "timestamp"

chart_data_shape_id =
cond do
path_is_timestamp? and Map.has_key?(flat_type_map, "metadata.status_code") ->
Map.has_key?(flat_type_map, "metadata.status_code") ->
:netlify_status_codes

path_is_timestamp? and Map.has_key?(flat_type_map, "metadata.response.status_code") ->
Map.has_key?(flat_type_map, "metadata.response.status_code") ->
:cloudflare_status_codes

path_is_timestamp? and Map.has_key?(flat_type_map, "metadata.proxy.statusCode") ->
Map.has_key?(flat_type_map, "metadata.proxy.statusCode") ->
:vercel_status_codes

path_is_timestamp? and Map.has_key?(flat_type_map, "metadata.level") ->
Map.has_key?(flat_type_map, "metadata.level") ->
:elixir_logger_levels

true ->
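With the path_is_timestamp? check dropped, the chart shape is now chosen purely from which fields exist in the source's flattened schema. A runnable sketch of the same cond against a made-up schema map (field names and shape atoms come from the hunk above; the final fallback value is cut off by the hunk, so a placeholder is used):

  flat_type_map = %{"timestamp" => :timestamp, "metadata.level" => :string}

  chart_data_shape_id =
    cond do
      Map.has_key?(flat_type_map, "metadata.status_code") -> :netlify_status_codes
      Map.has_key?(flat_type_map, "metadata.response.status_code") -> :cloudflare_status_codes
      Map.has_key?(flat_type_map, "metadata.proxy.statusCode") -> :vercel_status_codes
      Map.has_key?(flat_type_map, "metadata.level") -> :elixir_logger_levels
      # placeholder fallback; the real value lies outside this hunk
      true -> :none
    end

  chart_data_shape_id
  #=> :elixir_logger_levels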
49 changes: 33 additions & 16 deletions lib/logflare/logs/search/logs_search_queries.ex
@@ -12,9 +12,23 @@ defmodule Logflare.Logs.SearchQueries do
|> select([t], %{
timestamp:
fragment(
"(`$$__DEFAULT_DATASET__$$`.LF_TIMESTAMP_TRUNC(?, ?)) as timestamp",

"""
(case
when ? = 'DAY' then TIMESTAMP_TRUNC(?, DAY)
when ? = 'HOUR' then TIMESTAMP_TRUNC(?, HOUR)
when ? = 'MINUTE' then TIMESTAMP_TRUNC(?, MINUTE)
when ? = 'SECOND' then TIMESTAMP_TRUNC(?, SECOND)
end) as timestamp
""",
^String.upcase("#{chart_period}"),
t.timestamp,
^String.upcase("#{chart_period}"),
t.timestamp,
^String.upcase("#{chart_period}"),
t.timestamp,
^String.upcase("#{chart_period}"),
t.timestamp
)
})
end
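Ecto's fragment/1 takes only positional ? placeholders, so the upcased chart period and the timestamp column are interpolated once per CASE branch, eight arguments in total; BigQuery's TIMESTAMP_TRUNC needs its date part as a keyword rather than a bound string, which is why the branching exists at all. A trimmed two-branch sketch of the same construction (table name hypothetical):

  import Ecto.Query

  chart_period = :minute

  query =
    from(t in "log_events",
      select: %{
        timestamp:
          fragment(
            """
            (case
              when ? = 'HOUR' then TIMESTAMP_TRUNC(?, HOUR)
              when ? = 'MINUTE' then TIMESTAMP_TRUNC(?, MINUTE)
            end) as timestamp
            """,
            ^String.upcase("#{chart_period}"),
            t.timestamp,
            ^String.upcase("#{chart_period}"),
            t.timestamp
          )
      }
    )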
@@ -24,9 +38,25 @@
|> select([t], %{
timestamp:
fragment(
"(`$$__DEFAULT_DATASET__$$`.LF_TIMESTAMP_TRUNC_WITH_TIMEZONE(?, ?, ?)) as timestamp",
"""
(case
when ? = 'DAY' then TIMESTAMP_TRUNC(?, DAY , ?)
when ? = 'HOUR' then TIMESTAMP_TRUNC(?, HOUR , ?)
when ? = 'MINUTE' then TIMESTAMP_TRUNC(?, MINUTE , ?)
when ? = 'SECOND' then TIMESTAMP_TRUNC(?, SECOND , ?)
end) as timestamp
""",
^String.upcase("#{chart_period}"),
t.timestamp,
^timezone,
^String.upcase("#{chart_period}"),
t.timestamp,
^timezone,
^String.upcase("#{chart_period}"),
t.timestamp,
^timezone,
^String.upcase("#{chart_period}"),
t.timestamp,
^timezone
)
})
@@ -93,10 +123,6 @@ defmodule Logflare.Logs.SearchQueries do
limit(query, ^number)
end

def timestamp_truncator(period) when period in @chart_periods do
dynamic([t], lf_timestamp_trunc(t.timestamp, ^period))
end

def where_streaming_buffer(query) do
where(query, in_streaming_buffer())
end
@@ -113,12 +139,6 @@
)
end

def select_timestamp_trunc(query, chart_period) do
select(query, [t, ...], %{
timestamp: lf_timestamp_trunc(t.timestamp, ^chart_period)
})
end

def select_merge_total(query) do
select_merge(query, [t, ...], %{
total: fragment("COUNT(?) as total", t.timestamp)
@@ -269,9 +289,6 @@
|> select([t], fragment("*"))
end

def select_default_fields(query, :events) do
select(query, [:timestamp, :id, :event_message])
end

def source_table_streaming_buffer(bq_table_id) when is_binary(bq_table_id) do
from(bq_table_id)
2 changes: 0 additions & 2 deletions lib/logflare/source/bigquery/bigquery_udf.ex
@@ -52,8 +52,6 @@ defmodule Logflare.User.BigQueryUDFs do
when is_binary(bdid) and bdid != "" and is_binary(bqid) and bqid != "" do
"
#{SFns.lf_timestamp_sub(bqid, bdid)}
#{SFns.lf_timestamp_trunc(bqid, bdid)}
#{SFns.lf_timestamp_trunc_with_timezone(bqid, bdid)}
#{SFns.lf_generate_timestamp_array(bqid, bdid)}
"
end
30 changes: 0 additions & 30 deletions lib/logflare/source/bigquery/udf/search_fns.ex
@@ -19,36 +19,6 @@ defmodule Logflare.User.BigQueryUDFs.SearchFns do
"
end

def lf_timestamp_trunc(bq_project_id, bq_dataset_id) do
"
CREATE OR REPLACE FUNCTION
`#{bq_project_id}`.#{bq_dataset_id}.LF_TIMESTAMP_TRUNC(_timestamp TIMESTAMP, _date_part STRING) AS (
CASE _date_part
WHEN 'MICROSECOND' THEN TIMESTAMP_TRUNC(_timestamp, MICROSECOND)
WHEN 'MILLISECOND' THEN TIMESTAMP_TRUNC(_timestamp, MILLISECOND)
WHEN 'SECOND' THEN TIMESTAMP_TRUNC(_timestamp, SECOND)
WHEN 'MINUTE' THEN TIMESTAMP_TRUNC(_timestamp, MINUTE)
WHEN 'HOUR' THEN TIMESTAMP_TRUNC(_timestamp, HOUR)
WHEN 'DAY' THEN TIMESTAMP_TRUNC(_timestamp, DAY)
END);
"
end

def lf_timestamp_trunc_with_timezone(bq_project_id, bq_dataset_id) do
"
CREATE OR REPLACE FUNCTION
`#{bq_project_id}`.#{bq_dataset_id}.LF_TIMESTAMP_TRUNC_WITH_TIMEZONE(_timestamp TIMESTAMP, _date_part STRING, timezone STRING) AS (
CASE _date_part
WHEN 'MICROSECOND' THEN TIMESTAMP_TRUNC(_timestamp, MICROSECOND, timezone)
WHEN 'MILLISECOND' THEN TIMESTAMP_TRUNC(_timestamp, MILLISECOND, timezone)
WHEN 'SECOND' THEN TIMESTAMP_TRUNC(_timestamp, SECOND, timezone)
WHEN 'MINUTE' THEN TIMESTAMP_TRUNC(_timestamp, MINUTE, timezone)
WHEN 'HOUR' THEN TIMESTAMP_TRUNC(_timestamp, HOUR, timezone)
WHEN 'DAY' THEN TIMESTAMP_TRUNC(_timestamp, DAY, timezone)
END);
"
end

def lf_generate_timestamp_array(bq_project_id, bq_dataset_id) do
"
CREATE OR REPLACE FUNCTION
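Behavioral note on the deletions: the dropped UDF branched over six date parts, while the inline CASE fragments added in logs_search_queries.ex cover four, so microsecond and millisecond truncation are no longer reachable, presumably fine since chart periods only go down to seconds. The difference at a glance (an observation about the diff, not code from the commit):

  udf_parts    = ~w(MICROSECOND MILLISECOND SECOND MINUTE HOUR DAY)
  inline_parts = ~w(SECOND MINUTE HOUR DAY)

  udf_parts -- inline_parts
  #=> ["MICROSECOND", "MILLISECOND"]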
@@ -9,14 +9,16 @@
<!-- TODO: TO BE DELETED WHEN UNDERLYING ISSUE IS FOUND -->
<% %{"timestamp" => timestamp, "event_message" => message} = log.body
#dbg(log.body)
log_level = get_in(log.body, ["metadata", "level"]) || get_in(log.body, ["level"]) || "info"
formatted_timestamp =
if @use_local_time do
format_timestamp(timestamp, @user_local_timezone)
else
format_timestamp(timestamp) <> " UTC"
end %>
<li id={"log-event_#{log.id || log.body["timestamp"]}"} class="tw-group">
<span class="tw-whitespace-pre-wrap"><mark class="log-datestamp" data-timestamp={timestamp}><%= formatted_timestamp %></mark>&nbsp;<%= message %></span>
<span class="tw-whitespace-pre-wrap"><mark class={"log-#{log_level}"} data-timestamp={timestamp}><%= formatted_timestamp %></mark>&nbsp;<%= message %></span>
<span class="tw-inline-block tw-text-[0.65rem] tw-align-text-bottom tw-inline-flex tw-flex-row tw-gap-2">
<%= live_modal_show_link(component: LogflareWeb.Search.LogEventViewerComponent, modal_id: :log_event_viewer, title: "Log Event", phx_value_log_event_id: log.id, phx_value_log_event_timestamp: log.body["timestamp"]) do %>
<span>view</span>
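The new log_level assign falls back from metadata.level to a top-level level key and finally to "info", and it drives the log-#{log_level} class on the <mark>, which the .log-error rule added to app.scss above picks up. A runnable sketch of the fallback with invented sample bodies:

  get_level = fn body ->
    get_in(body, ["metadata", "level"]) || body["level"] || "info"
  end

  get_level.(%{"metadata" => %{"level" => "error"}})
  #=> "error"
  get_level.(%{"level" => "warn"})
  #=> "warn"
  get_level.(%{})
  #=> "info"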
