chore: fix failing tests
Ziinc committed Nov 5, 2024
1 parent b247846 commit d00e009
Showing 4 changed files with 15 additions and 14 deletions.
5 changes: 2 additions & 3 deletions lib/logflare/backends/source_metrics_cache.ex
@@ -1,10 +1,9 @@
defmodule Logflare.Backends.SourceMetricsCache do
@moduledoc false
import Cachex.Spec
alias Logflare.Logs.LogEvents
alias Logflare.ContextCache
alias Logflare.LogEvent, as: LE
@ttl :timer.hours(1)
alias Logflare.Utils

@cache __MODULE__

@@ -37,7 +36,7 @@ defmodule Logflare.Backends.SourceMetricsCache do
@spec put_event_with_id_and_timestamp(atom, keyword, LE.t()) :: term
def put_event_with_id_and_timestamp(source_token, kw, %LE{} = log_event) do
cache_key = {@fetch_event_by_id_and_timestamp_key, [source_token, kw]}
Cachex.put(@cache, cache_key, {:ok, log_event}, expire: @ttl)
Cachex.put(@cache, cache_key, {:ok, log_event})
end

@spec fetch_event_by_id_and_timestamp(atom, keyword) :: {:ok, map()} | {:error, map()}
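Context for the change above: dropping `expire: @ttl` (and the `@ttl` module attribute) means each entry now relies on whatever default expiration the cache itself is started with. A minimal sketch of that pattern, assuming Cachex v4 and an illustrative cache name:

```elixir
# Minimal sketch, not Logflare code: a demo cache started with a one-hour
# default TTL, so individual writes no longer need a per-entry `expire:` option.
import Cachex.Spec

{:ok, _pid} =
  Cachex.start_link(:source_metrics_demo,
    expiration:
      expiration(
        # default lifetime applied to every entry
        default: :timer.hours(1),
        # janitor sweep interval for expired entries
        interval: :timer.seconds(30)
      )
  )

# Counterpart of the updated put above: no expiry argument on the write itself.
Cachex.put(:source_metrics_demo, {:event, [:my_source, id: "abc"]}, {:ok, %{}})
```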
2 changes: 1 addition & 1 deletion lib/logflare/context_cache.ex
@@ -98,7 +98,7 @@ defmodule Logflare.ContextCache do
def bust_keys(values) when is_list(values) do
for {context, primary_key} <- values do
filter = {:==, {:element, 1, :key}, {{context, primary_key}}}
query = Cachex.Query.create(filter, {:key, :value})
query = Cachex.Query.build(where: filter, output: {:key, :value})
context_cache = cache_name(context)

Logflare.ContextCache
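The replacement of `Cachex.Query.create/2` with `Cachex.Query.build/1` follows the Cachex v4 query API, which takes `:where` and `:output` options. A rough sketch of how such a query is typically consumed; the cache name and key below are illustrative, not Logflare's:

```elixir
# Illustrative only: stream the {key, value} pairs whose key is a
# {context, primary_key} tuple with a given first element.
filter = {:==, {:element, 1, :key}, {{Logflare.Users, 1}}}
query = Cachex.Query.build(where: filter, output: {:key, :value})

:context_cache_demo
|> Cachex.stream!(query)
|> Enum.to_list()
```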
1 change: 0 additions & 1 deletion lib/logflare/logs/log_events_cache.ex
@@ -1,6 +1,5 @@
defmodule Logflare.Logs.LogEvents.Cache do
@moduledoc false
import Cachex.Spec
alias Logflare.Logs.LogEvents
alias Logflare.ContextCache
alias Logflare.LogEvent, as: LE
21 changes: 12 additions & 9 deletions lib/logflare/logs/rejected_log_events.ex
@@ -21,6 +21,7 @@ defmodule Logflare.Logs.RejectedLogEvents do
}'
```
"""
require Ex2ms
alias Logflare.Source
alias Logflare.LogEvent, as: LE
alias Logflare.Utils
@@ -37,6 +38,7 @@ defmodule Logflare.Logs.RejectedLogEvents do
[
@cache,
[
expiration: Utils.cache_expiration_min(60),
hooks:
[
if(stats, do: Utils.cache_stats()),
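The new `expiration:` option gives the whole cache a default TTL, and `Utils.cache_stats/0` presumably returns a Cachex hook record, since Cachex v4 collects stats through the `Cachex.Stats` hook rather than a `stats: true` flag. A rough sketch under those assumptions, with an illustrative cache name:

```elixir
# Rough sketch, assuming Cachex v4: a 60-minute default TTL plus the built-in
# stats hook, with the counters read back afterwards.
import Cachex.Spec

{:ok, _pid} =
  Cachex.start_link(:rejected_demo,
    expiration: expiration(default: :timer.minutes(60)),
    hooks: [hook(module: Cachex.Stats)]
  )

Cachex.put(:rejected_demo, {:my_source, "event-id"}, %{})
{:ok, _stats} = Cachex.stats(:rejected_demo)
```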
@@ -50,7 +52,11 @@

@spec get_by_source(Source.t()) :: list(LE.t())
def get_by_source(%Source{token: token}) do
get!(token).log_events
get!(token)
|> case do
%{log_events: events} -> events
other -> other
end
end

def count(%Source{} = s) do
@@ -80,15 +86,12 @@ defmodule Logflare.Logs.RejectedLogEvents do
:ok
end

def query(source_id) when is_atom(source_id) do
def query(token) when is_atom(token) do
filter = {:==, {:element, 1, :key}, {:const, token}}
query = Cachex.Query.build(where: filter, output: :value)

@cache
|> Cachex.stream!()
|> Stream.filter(fn x ->
match?({:entry, {^source_id, _le_id}, _ts, _, _le}, x)
end)
|> Stream.map(fn {:entry, {^source_id, _le_id}, _ts, _, le} ->
le
end)
|> Cachex.stream!(query)
|> Enum.reverse()
|> Enum.take(100)
end
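The rewritten `query/1` leans on the same query API instead of filtering a raw `Cachex.stream!/1` by hand; `{:const, token}` embeds the runtime atom as a match-spec literal so it is compared by value rather than treated as a pattern variable. A standalone sketch, assuming a demo cache whose keys are `{source_token, log_event_id}` tuples:

```elixir
# Standalone sketch, not Logflare code: stream only the values whose key tuple
# starts with the given source token, keeping at most 100 of them.
token = :my_source_token
filter = {:==, {:element, 1, :key}, {:const, token}}
query = Cachex.Query.build(where: filter, output: :value)

rejected =
  :rejected_demo
  |> Cachex.stream!(query)
  |> Enum.take(100)
```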