diff --git a/lib/logflare/sources.ex b/lib/logflare/sources.ex
index 2738d7ccb..442b7a865 100644
--- a/lib/logflare/sources.ex
+++ b/lib/logflare/sources.ex
@@ -18,6 +18,12 @@ defmodule Logflare.Sources do
alias Logflare.SourceSchemas
alias Logflare.User
alias Logflare.Backends
+ alias Logflare.Billing.Plan
+ alias Logflare.Billing
+  alias Logflare.Google.BigQuery.GenUtils
+ alias Logflare.Users
+ alias Logflare.SingleTenant
+ alias Logflare.Google.BigQuery
require Logger
@@ -37,6 +43,7 @@ defmodule Logflare.Sources do
def list_sources_by_user(user_id) do
from(s in Source, where: s.user_id == ^user_id)
|> Repo.all()
+ |> Enum.map(&put_retention_days/1)
end
@spec create_source(map(), User.t()) :: {:ok, Source.t()} | {:error, Ecto.Changeset.t()}
@@ -46,7 +53,7 @@ defmodule Logflare.Sources do
|> Enum.map(fn {k, v} -> {to_string(k), v} end)
|> Map.new()
- with {:ok, source} = res <-
+ with {:ok, source} <-
user
|> Ecto.build_assoc(:sources)
|> Source.update_by_user_changeset(source_params)
@@ -55,7 +62,11 @@ defmodule Logflare.Sources do
create_big_query_schema_and_start_source(source)
end
- res
+ updated =
+ source
+ |> put_retention_days()
+
+ {:ok, updated}
end
end
@@ -108,6 +119,7 @@ defmodule Logflare.Sources do
def get(source_id) when is_integer(source_id) do
Repo.get(Source, source_id)
+ |> put_retention_days()
end
def update_source(source) do
@@ -118,12 +130,35 @@ defmodule Logflare.Sources do
source
|> Source.changeset(attrs)
|> Repo.update()
+ |> post_update(source)
+ end
+
+ defp post_update({:ok, updated}, source) do
+ # only update the default backend
+ source = put_retention_days(source)
+ updated = put_retention_days(updated)
+
+ if source.retention_days != updated.retention_days and not SingleTenant.postgres_backend?() do
+ user = Users.Cache.get(updated.user_id) |> Users.maybe_put_bigquery_defaults()
+
+ BigQuery.patch_table_ttl(
+ updated.token,
+ updated.retention_days * 86_400_000,
+ user.bigquery_dataset_id,
+ user.bigquery_project_id
+ )
+ end
+
+ {:ok, updated}
end
+ defp post_update(res, _prev), do: res
+
def update_source_by_user(source, attrs) do
source
|> Source.update_by_user_changeset(attrs)
|> Repo.update()
+ |> post_update(source)
end
def update_source_by_user(_source, _plan, %{"notifications_every" => ""}) do
@@ -159,6 +194,7 @@ defmodule Logflare.Sources do
@spec get_by(Keyword.t()) :: Source.t() | nil
def get_by(kw) do
Repo.get_by(Source, kw)
+ |> put_retention_days()
end
@spec get_by_and_preload(Keyword.t()) :: Source.t() | nil
@@ -169,6 +205,7 @@ defmodule Logflare.Sources do
nil -> nil
s -> preload_defaults(s)
end)
+ |> put_retention_days()
end
@spec get_by_and_preload(Keyword.t(), Keyword.t()) :: Source.t() | nil
@@ -179,6 +216,7 @@ defmodule Logflare.Sources do
nil -> nil
s -> Repo.preload(s, preloads)
end)
+ |> put_retention_days()
end
def get_rate_limiter_metrics(source, bucket: :default) do
@@ -402,4 +440,30 @@ defmodule Logflare.Sources do
_ -> false
end
end
+
+ def put_retention_days(%Source{} = source) do
+ user = Users.Cache.get(source.user_id)
+ plan = Billing.Cache.get_plan_by_user(user)
+ %{source | retention_days: source_ttl_to_days(source, plan)}
+ end
+
+ def put_retention_days(source), do: source
+
+ @doc """
+  Converts a source's BigQuery table TTL to days, falling back to the plan's TTL limit or the default.
+ """
+ @spec source_ttl_to_days(Source.t(), Plan.t()) :: integer()
+ def source_ttl_to_days(%Source{bigquery_table_ttl: ttl}, _plan)
+      when is_number(ttl) and ttl >= 0 do
+ round(ttl)
+ end
+
+ # fallback to plan value or default init value
+ # use min to avoid misrepresenting what user should see, in cases where actual is more than plan.
+ def source_ttl_to_days(_source, %Plan{limit_source_ttl: ttl}) do
+ min(
+ round(GenUtils.default_table_ttl_days()),
+ round(ttl / :timer.hours(24))
+ )
+ end
end
diff --git a/lib/logflare/sources/source.ex b/lib/logflare/sources/source.ex
index 979b892c9..8840b3bab 100644
--- a/lib/logflare/sources/source.ex
+++ b/lib/logflare/sources/source.ex
@@ -27,7 +27,8 @@ defmodule Logflare.Source do
:metrics,
:notifications,
:custom_event_message_keys,
- :backends
+ :backends,
+ :retention_days
]}
defp env_dataset_id_append,
do: Application.get_env(:logflare, Logflare.Google)[:dataset_id_append]
@@ -129,6 +130,7 @@ defmodule Logflare.Source do
field(:drop_lql_string, :string)
field(:v2_pipeline, :boolean, default: false)
field(:suggested_keys, :string, default: "")
+ field(:retention_days, :integer, virtual: true)
# Causes a shitstorm
# field :bigquery_schema, Ecto.Term
@@ -178,7 +180,8 @@ defmodule Logflare.Source do
:drop_lql_filters,
:drop_lql_string,
:v2_pipeline,
- :suggested_keys
+ :suggested_keys,
+ :retention_days
])
|> cast_embed(:notifications, with: &Notifications.changeset/2)
|> put_single_tenant_postgres_changes()
@@ -203,7 +206,8 @@ defmodule Logflare.Source do
:drop_lql_filters,
:drop_lql_string,
:v2_pipeline,
- :suggested_keys
+ :suggested_keys,
+ :retention_days
])
|> cast_embed(:notifications, with: &Notifications.changeset/2)
|> put_single_tenant_postgres_changes()
@@ -216,9 +220,15 @@ defmodule Logflare.Source do
|> unique_constraint(:name, name: :sources_name_index)
|> unique_constraint(:token)
|> unique_constraint(:public_token)
+ |> put_source_ttl_change()
|> validate_source_ttl(source)
end
+  defp put_source_ttl_change(changeset) do
+    days = get_change(changeset, :retention_days)
+    if days, do: put_change(changeset, :bigquery_table_ttl, days), else: changeset
+  end
+
def validate_source_ttl(changeset, source) do
if source.user_id do
user = Users.get(source.user_id)
diff --git a/lib/logflare/users/users.ex b/lib/logflare/users/users.ex
index c3676576b..945b0aca2 100644
--- a/lib/logflare/users/users.ex
+++ b/lib/logflare/users/users.ex
@@ -88,6 +88,9 @@ defmodule Logflare.Users do
user
|> Repo.preload([:sources, :billing_account, :team])
|> maybe_put_bigquery_defaults()
+ |> Map.update!(:sources, fn sources ->
+ Enum.map(sources, &Sources.put_retention_days/1)
+ end)
end
def preload_team(user) do
@@ -104,6 +107,9 @@ defmodule Logflare.Users do
def preload_sources(user) do
Repo.preload(user, :sources)
+ |> Map.update!(:sources, fn sources ->
+ Enum.map(sources, &Sources.put_retention_days/1)
+ end)
end
def preload_endpoints(user) do
diff --git a/lib/logflare_web/controllers/source_controller.ex b/lib/logflare_web/controllers/source_controller.ex
index 0ca766f3f..3c1f9bf47 100644
--- a/lib/logflare_web/controllers/source_controller.ex
+++ b/lib/logflare_web/controllers/source_controller.ex
@@ -439,22 +439,9 @@ defmodule LogflareWeb.SourceController do
end
end
- def update(%{assigns: %{source: old_source, user: user}} = conn, %{"source" => source_params}) do
- changeset = Source.update_by_user_changeset(old_source, source_params)
-
- case Repo.update(changeset) do
+ def update(%{assigns: %{source: old_source}} = conn, %{"source" => source_params}) do
+ case Sources.update_source_by_user(old_source, source_params) do
{:ok, source} ->
- ttl = source.bigquery_table_ttl
-
- if ttl do
- BigQuery.patch_table_ttl(
- source.token,
- source.bigquery_table_ttl * 86_400_000,
- user.bigquery_dataset_id,
- user.bigquery_project_id
- )
- end
-
:ok = Supervisor.ensure_started(source)
conn
diff --git a/lib/logflare_web/templates/source/dashboard.html.heex b/lib/logflare_web/templates/source/dashboard.html.heex
index d17d6ba13..e2f76e287 100644
--- a/lib/logflare_web/templates/source/dashboard.html.heex
+++ b/lib/logflare_web/templates/source/dashboard.html.heex
@@ -135,7 +135,7 @@
- <%= render(LogflareWeb.SharedView, "dashboard_source_metadata.html", conn: @conn, source: source, source_ttl_days: source_ttl_to_days(source, @plan), pipeline_counts: @pipeline_counts) %>
+ <%= render(LogflareWeb.SharedView, "dashboard_source_metadata.html", conn: @conn, source: source, source_ttl_days: source.retention_days, pipeline_counts: @pipeline_counts) %>
<% end %>
diff --git a/lib/logflare_web/templates/source/edit.html.eex b/lib/logflare_web/templates/source/edit.html.eex
index 78e541e41..09c3f954c 100644
--- a/lib/logflare_web/templates/source/edit.html.eex
+++ b/lib/logflare_web/templates/source/edit.html.eex
@@ -246,8 +246,8 @@
Set how long to keep data in your backend.
<%= form_for @changeset, Routes.source_path(@conn, :update, @source), fn e -> %>
- <%= text_input e, :bigquery_table_ttl, placeholder: "3", class: "form-control form-control-margin" %>
- <%= error_tag e, :bigquery_table_ttl %>
+ <%= text_input e, :retention_days, placeholder: "3", class: "form-control form-control-margin" %>
+ <%= error_tag e, :retention_days %>
Days to keep data.
diff --git a/lib/logflare_web/views/source_view.ex b/lib/logflare_web/views/source_view.ex
index 6790f0211..974a78f46 100644
--- a/lib/logflare_web/views/source_view.ex
+++ b/lib/logflare_web/views/source_view.ex
@@ -1,9 +1,6 @@
defmodule LogflareWeb.SourceView do
import LogflareWeb.Helpers.Forms
alias LogflareWeb.Router.Helpers, as: Routes
- alias Logflare.Billing.Plan
- alias Logflare.Source
- alias Logflare.Google.BigQuery.GenUtils
use LogflareWeb, :view
def log_url(route) do
@@ -20,22 +17,4 @@ defmodule LogflareWeb.SourceView do
end
|> URI.to_string()
end
-
- @doc """
- Formats a source TTL to the specified unit
- """
- @spec source_ttl_to_days(Source.t(), Plan.t()) :: integer()
- def source_ttl_to_days(%Source{bigquery_table_ttl: ttl}, _plan)
- when ttl >= 0 and ttl != nil do
- round(ttl)
- end
-
- # fallback to plan value or default init value
- # use min to avoid misrepresenting what user should see, in cases where actual is more than plan.
- def source_ttl_to_days(_source, %Plan{limit_source_ttl: ttl}) do
- min(
- round(GenUtils.default_table_ttl_days()),
- round(ttl / :timer.hours(24))
- )
- end
end
diff --git a/test/logflare/backends/adaptor/datadog_adaptor_test.exs b/test/logflare/backends/adaptor/datadog_adaptor_test.exs
index 29fabf1db..55a76cde7 100644
--- a/test/logflare/backends/adaptor/datadog_adaptor_test.exs
+++ b/test/logflare/backends/adaptor/datadog_adaptor_test.exs
@@ -13,6 +13,7 @@ defmodule Logflare.Backends.Adaptor.DatadogAdaptorTest do
setup do
start_supervised!(AllLogsLogged)
+ insert(:plan)
:ok
end
diff --git a/test/logflare/backends/buffer_producer_test.exs b/test/logflare/backends/buffer_producer_test.exs
index ab786076d..7c0a4e96a 100644
--- a/test/logflare/backends/buffer_producer_test.exs
+++ b/test/logflare/backends/buffer_producer_test.exs
@@ -6,6 +6,11 @@ defmodule Logflare.Backends.BufferProducerTest do
import ExUnit.CaptureLog
+ setup do
+ insert(:plan)
+ :ok
+ end
+
test "pulls events from IngestEventQueue" do
user = insert(:user)
source = insert(:source, user: user)
diff --git a/test/logflare/backends/dynamic_pipeline_test.exs b/test/logflare/backends/dynamic_pipeline_test.exs
index b4255ae2c..452d8ad9b 100644
--- a/test/logflare/backends/dynamic_pipeline_test.exs
+++ b/test/logflare/backends/dynamic_pipeline_test.exs
@@ -11,6 +11,7 @@ defmodule Logflare.DynamicPipelineTest do
import ExUnit.CaptureLog
setup do
+ insert(:plan)
user = insert(:user)
source = insert(:source, user: user)
diff --git a/test/logflare/backends/elastic_adaptor_test.exs b/test/logflare/backends/elastic_adaptor_test.exs
index 1c45bdf1e..197d65eb0 100644
--- a/test/logflare/backends/elastic_adaptor_test.exs
+++ b/test/logflare/backends/elastic_adaptor_test.exs
@@ -12,6 +12,7 @@ defmodule Logflare.Backends.Adaptor.ElasticAdaptorTest do
doctest @subject
setup do
+ insert(:plan)
start_supervised!(AllLogsLogged)
:ok
end
diff --git a/test/logflare/backends/ingest_events_queue_test.exs b/test/logflare/backends/ingest_events_queue_test.exs
index 508b416fe..4574a0224 100644
--- a/test/logflare/backends/ingest_events_queue_test.exs
+++ b/test/logflare/backends/ingest_events_queue_test.exs
@@ -8,6 +8,11 @@ defmodule Logflare.Backends.IngestEventQueueTest do
alias Logflare.Backends
alias Logflare.Backends.IngestEventQueue
+ setup do
+ insert(:plan)
+ :ok
+ end
+
test "get_table_size/1 returns nil for non-existing tables" do
assert nil == IngestEventQueue.get_table_size({1, 2, 4})
end
diff --git a/test/logflare/backends/webhook_adaptor_test.exs b/test/logflare/backends/webhook_adaptor_test.exs
index aeca52001..d9129b150 100644
--- a/test/logflare/backends/webhook_adaptor_test.exs
+++ b/test/logflare/backends/webhook_adaptor_test.exs
@@ -11,13 +11,13 @@ defmodule Logflare.Backends.WebhookAdaptorTest do
@subject Logflare.Backends.Adaptor.WebhookAdaptor
setup do
+ insert(:plan)
start_supervised!(AllLogsLogged)
:ok
end
describe "ingestion tests" do
setup do
- insert(:plan)
user = insert(:user)
source = insert(:source, user: user)
diff --git a/test/logflare/cluster_pubsub_test.exs b/test/logflare/cluster_pubsub_test.exs
index 885a1e500..4348f721d 100644
--- a/test/logflare/cluster_pubsub_test.exs
+++ b/test/logflare/cluster_pubsub_test.exs
@@ -63,6 +63,7 @@ defmodule Logflare.ClusterPubSubTest do
describe "ChannelTopics" do
setup do
+ insert(:plan)
[source: insert(:source, user: insert(:user))]
end
diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs
index a7de852ad..e6285af67 100644
--- a/test/logflare/endpoints_test.exs
+++ b/test/logflare/endpoints_test.exs
@@ -6,6 +6,11 @@ defmodule Logflare.EndpointsTest do
alias Logflare.Endpoints.Query
alias Logflare.Backends.Adaptor.PostgresAdaptor
+ setup do
+ insert(:plan)
+ :ok
+ end
+
test "list_endpoints_by" do
%{id: id, name: name} = insert(:endpoint)
assert [%{id: ^id}] = Endpoints.list_endpoints_by(name: name)
@@ -120,7 +125,6 @@ defmodule Logflare.EndpointsTest do
{:ok, TestUtils.gen_bq_response([%{"testing" => "123"}])}
end)
- insert(:plan)
user = insert(:user)
insert(:source, user: user, name: "c")
endpoint = insert(:endpoint, user: user, query: "select current_datetime() as testing")
@@ -133,7 +137,6 @@ defmodule Logflare.EndpointsTest do
{:ok, TestUtils.gen_bq_response([%{"testing" => "123"}])}
end)
- insert(:plan)
user = insert(:user)
insert(:endpoint,
@@ -151,7 +154,6 @@ defmodule Logflare.EndpointsTest do
{:ok, TestUtils.gen_bq_response([%{"testing" => "123"}])}
end)
- insert(:plan)
user = insert(:user)
insert(:source, user: user, name: "c")
query_string = "select current_datetime() as testing"
@@ -165,7 +167,6 @@ defmodule Logflare.EndpointsTest do
{:ok, TestUtils.gen_bq_response([%{"testing" => "123"}])}
end)
- insert(:plan)
user = insert(:user)
endpoint = insert(:endpoint, user: user, query: "select current_datetime() as testing")
_pid = start_supervised!({Logflare.Endpoints.Cache, {endpoint, %{}}})
@@ -186,7 +187,6 @@ defmodule Logflare.EndpointsTest do
{:ok, TestUtils.gen_bq_response([%{"testing" => "123"}])}
end)
- insert(:plan)
user = insert(:user)
endpoint = insert(:endpoint, user: user, query: "select current_datetime() as testing")
cache_pid = start_supervised!({Logflare.Endpoints.Cache, {endpoint, %{}}})
@@ -212,8 +212,6 @@ defmodule Logflare.EndpointsTest do
describe "running queries in postgres backends" do
setup do
- insert(:plan)
-
cfg = Application.get_env(:logflare, Logflare.Repo)
url = "postgresql://#{cfg[:username]}:#{cfg[:password]}@#{cfg[:hostname]}/#{cfg[:database]}"
@@ -295,7 +293,6 @@ defmodule Logflare.EndpointsTest do
{:ok, TestUtils.gen_bq_response([%{"testing" => "123"}])}
end)
- insert(:plan)
user = insert(:user)
endpoint =
diff --git a/test/logflare/logs/rejected_logs_test.exs b/test/logflare/logs/rejected_logs_test.exs
index df01e24dc..d172d193e 100644
--- a/test/logflare/logs/rejected_logs_test.exs
+++ b/test/logflare/logs/rejected_logs_test.exs
@@ -5,6 +5,7 @@ defmodule Logflare.Logs.RejectedLogEventsTest do
alias Logflare.{Sources, Users, LogEvent}
setup do
+ insert(:plan)
s1 = build(:source)
s2 = build(:source)
sources = [s1, s2]
diff --git a/test/logflare/source/bigquery/schema_test.exs b/test/logflare/source/bigquery/schema_test.exs
index 647353562..c12e8696c 100644
--- a/test/logflare/source/bigquery/schema_test.exs
+++ b/test/logflare/source/bigquery/schema_test.exs
@@ -5,6 +5,11 @@ defmodule Logflare.Source.BigQuery.SchemaTest do
alias Logflare.Source.BigQuery.Schema
alias Logflare.Google.BigQuery.SchemaUtils
+ setup do
+ insert(:plan)
+ :ok
+ end
+
test "next_update_ts/1" do
next_update = Schema.next_update_ts(6) |> trunc()
assert String.length("#{next_update}") == String.length("#{System.system_time(:millisecond)}")
diff --git a/test/logflare/sources/sources_cache_test.exs b/test/logflare/sources/sources_cache_test.exs
index d06b48900..2cbd6c774 100644
--- a/test/logflare/sources/sources_cache_test.exs
+++ b/test/logflare/sources/sources_cache_test.exs
@@ -4,6 +4,7 @@ defmodule Logflare.SourcesCacheTest do
use Logflare.DataCase
setup do
+ insert(:plan)
u1 = insert(:user)
s01 = insert(:source, user_id: u1.id)
s02 = insert(:source, user_id: u1.id)
diff --git a/test/logflare/sources_test.exs b/test/logflare/sources_test.exs
index b1eb8af7e..7dc023cce 100644
--- a/test/logflare/sources_test.exs
+++ b/test/logflare/sources_test.exs
@@ -17,20 +17,63 @@ defmodule Logflare.SourcesTest do
describe "create_source/2" do
setup do
user = insert(:user)
- insert(:plan, name: "Free")
%{user: user}
end
test "creates a source for a given user and creates schema", %{
user: %{id: user_id} = user
} do
+ insert(:plan, name: "Free")
assert {:ok, source} = Sources.create_source(%{name: TestUtils.random_string()}, user)
assert %Source{user_id: ^user_id, v2_pipeline: false} = source
assert SourceSchemas.get_source_schema_by(source_id: source.id)
end
+
+ test "creates a source with different retention", %{
+ user: user
+ } do
+ insert(:plan, name: "Free", limit_source_ttl: :timer.hours(24) * 20)
+
+ assert {:ok, %Source{retention_days: 10}} =
+ Sources.create_source(%{name: "some name", retention_days: 10}, user)
+ end
+ end
+
+ describe "update_source/2 with different retention_days" do
+ setup do
+ user = insert(:user)
+
+ %{user: user}
+ end
+
+ test "valid retention days", %{
+ user: user
+ } do
+ Logflare.Google.BigQuery
+ |> expect(:patch_table_ttl, fn _source_id, _table_ttl, _dataset_id, _project_id ->
+ {:ok, %Tesla.Env{}}
+ end)
+
+ insert(:plan, name: "Free", limit_source_ttl: :timer.hours(24) * 20)
+ source = insert(:source, user: user)
+
+ assert {:ok, %Source{retention_days: 12, bigquery_table_ttl: 12}} =
+ Sources.update_source(source, %{retention_days: 12})
+ end
+
+ test "retention days exceeds", %{user: user} do
+ insert(:plan, name: "Free", limit_source_ttl: :timer.hours(24) * 1)
+ source = insert(:source, user: user)
+ assert {:error, %Ecto.Changeset{}} = Sources.update_source(source, %{retention_days: 12})
+ end
end
describe "list_sources_by_user/1" do
+ setup do
+ insert(:plan)
+ :ok
+ end
+
test "lists sources for a given user" do
user = insert(:user)
insert(:source, user: user)
@@ -117,6 +160,7 @@ defmodule Logflare.SourcesTest do
describe "preload_for_dashboard/1" do
setup do
+ insert(:plan)
[user: insert(:user)]
end
diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs
index ab1ebe1fb..90f69c6c7 100644
--- a/test/logflare/sql_test.exs
+++ b/test/logflare/sql_test.exs
@@ -11,6 +11,7 @@ defmodule Logflare.SqlTest do
@env "test"
setup do
+ insert(:plan)
values = Application.get_env(:logflare, Logflare.Google)
to_put = Keyword.put(values, :project_id, @logflare_project_id)
Application.put_env(:logflare, Logflare.Google, to_put)
diff --git a/test/logflare/users/users_cache_test.exs b/test/logflare/users/users_cache_test.exs
index 85c8f14cb..704b2f7e0 100644
--- a/test/logflare/users/users_cache_test.exs
+++ b/test/logflare/users/users_cache_test.exs
@@ -5,6 +5,7 @@ defmodule Logflare.Users.CacheTest do
use Logflare.DataCase
setup do
+ insert(:plan)
source = build(:source, notifications: %{})
user =
diff --git a/test/logflare/users/users_test.exs b/test/logflare/users/users_test.exs
index e3a76ef81..197e022f2 100644
--- a/test/logflare/users/users_test.exs
+++ b/test/logflare/users/users_test.exs
@@ -6,6 +6,7 @@ defmodule Logflare.UsersTest do
alias Logflare.Users
setup do
+ insert(:plan)
user = insert(:user)
source = insert(:source, user_id: user.id)
source = Sources.get_by(token: source.token)
diff --git a/test/logflare_grpc/trace/server_test.exs b/test/logflare_grpc/trace/server_test.exs
index 57a5a52a9..3c99bfd32 100644
--- a/test/logflare_grpc/trace/server_test.exs
+++ b/test/logflare_grpc/trace/server_test.exs
@@ -6,6 +6,11 @@ defmodule LogflareGrpc.Trace.ServerTest do
alias Opentelemetry.Proto.Collector.Trace.V1.ExportTraceServiceResponse
alias Opentelemetry.Proto.Collector.Trace.V1.TraceService.Stub
+ setup do
+ insert(:plan)
+ :ok
+ end
+
describe "export/2" do
setup do
user = insert(:user)
diff --git a/test/logflare_web/controllers/api/endpoint_controller_test.exs b/test/logflare_web/controllers/api/endpoint_controller_test.exs
index c60dfd0eb..4ce382aa8 100644
--- a/test/logflare_web/controllers/api/endpoint_controller_test.exs
+++ b/test/logflare_web/controllers/api/endpoint_controller_test.exs
@@ -2,6 +2,7 @@ defmodule LogflareWeb.Api.EndpointControllerTest do
use LogflareWeb.ConnCase
setup do
+ insert(:plan)
endpoints = insert_list(2, :endpoint)
user = insert(:user, endpoint_queries: endpoints)
insert(:source, name: "logs", user: user)
diff --git a/test/logflare_web/controllers/api/query_controller_test.exs b/test/logflare_web/controllers/api/query_controller_test.exs
index b76ad0dda..e4d7d7f5b 100644
--- a/test/logflare_web/controllers/api/query_controller_test.exs
+++ b/test/logflare_web/controllers/api/query_controller_test.exs
@@ -86,8 +86,6 @@ defmodule LogflareWeb.Api.QueryControllerTest do
describe "pg_sql" do
setup do
- insert(:plan)
-
cfg = Application.get_env(:logflare, Logflare.Repo)
url = "postgresql://#{cfg[:username]}:#{cfg[:password]}@#{cfg[:hostname]}/#{cfg[:database]}"
diff --git a/test/logflare_web/controllers/api/source_controller_test.exs b/test/logflare_web/controllers/api/source_controller_test.exs
index a41061570..fe62b05ba 100644
--- a/test/logflare_web/controllers/api/source_controller_test.exs
+++ b/test/logflare_web/controllers/api/source_controller_test.exs
@@ -157,6 +157,46 @@ defmodule LogflareWeb.Api.SourceControllerTest do
end
end
+ describe "retention_days" do
+ setup do
+ Logflare.Google.BigQuery
+ |> expect(:patch_table_ttl, fn _source_id, _table_ttl, _dataset_id, _project_id ->
+ {:ok, %Tesla.Env{}}
+ end)
+
+ :ok
+ end
+
+ test "PUT updates retention_days", %{
+ conn: conn,
+ user: user,
+ sources: [source | _]
+ } do
+ assert %{"retention_days" => 3} =
+ conn
+ |> add_access_token(user, "private")
+ |> get("/api/sources/#{source.token}")
+ |> json_response(200)
+
+ assert %{"retention_days" => 1} =
+ conn
+ |> add_access_token(user, "private")
+ |> put("/api/sources/#{source.token}", %{name: "some name", retention_days: 1})
+ |> json_response(200)
+ end
+
+ test "PATCH updates retention_days", %{
+ conn: conn,
+ user: user,
+ sources: [source | _]
+ } do
+ conn
+ |> add_access_token(user, "private")
+ |> patch("/api/sources/#{source.token}", %{retention_days: 1})
+ |> response(204)
+ end
+ end
+
describe "add_backend/2" do
test "attaches a backend", %{conn: conn, user: user, sources: [source | _]} do
backend = insert(:backend, user: user)
diff --git a/test/logflare_web/plugs/fetch_resource_test.exs b/test/logflare_web/plugs/fetch_resource_test.exs
index b61da5f78..67e598344 100644
--- a/test/logflare_web/plugs/fetch_resource_test.exs
+++ b/test/logflare_web/plugs/fetch_resource_test.exs
@@ -6,6 +6,7 @@ defmodule LogflareWeb.Plugs.FetchResourceTest do
alias Logflare.Endpoints.Query
setup do
+ insert(:plan)
user = insert(:user)
endpoint = insert(:endpoint, user: user)
source = insert(:source, user: user)
diff --git a/test/logflare_web/plugs/verify_api_access_test.exs b/test/logflare_web/plugs/verify_api_access_test.exs
index 277f68e6b..3947bf8c7 100644
--- a/test/logflare_web/plugs/verify_api_access_test.exs
+++ b/test/logflare_web/plugs/verify_api_access_test.exs
@@ -4,6 +4,7 @@ defmodule LogflareWeb.Plugs.VerifyApiAccessTest do
alias LogflareWeb.Plugs.VerifyApiAccess
setup do
+ insert(:plan)
user = insert(:user)
endpoint_auth = insert(:endpoint, user: user, enable_auth: true)
endpoint_open = insert(:endpoint, user: user, enable_auth: false)