
Unified GCP Service Account #44

Merged · 2 commits · May 21, 2024 · Changes from all commits
1 change: 1 addition & 0 deletions config.rb
@@ -154,6 +154,7 @@ def self.e2e_test?
override :e2e_test, "0"
override :backup_retention_days, 7, int
optional :lantern_backend_database_url, string
override :lantern_log_dataset, "lantern_logs", string

# Cloudflare
optional :cf_token, string
85 changes: 85 additions & 0 deletions lib/hosting/gcp_apis.rb
@@ -332,6 +332,91 @@ def allow_bucket_usage_by_prefix(service_account_email, bucket_name, prefix)
Hosting::GcpApis.check_errors(response)
end

def create_big_query_table(dataset, table, schema)
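# Creates a log table in the shared dataset via BigQuery's tables.insert REST endpoint.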
connection = Excon.new("https://bigquery.googleapis.com", headers: @host[:headers])

request_body = {
tableReference: {
projectId: @project,
datasetId: dataset,
tableId: table
},
schema: {
fields: schema
}
}

response = connection.post(
path: "/bigquery/v2/projects/#{@project}/datasets/#{dataset}/tables",
body: JSON.dump(request_body),
headers: {"Content-Type" => "application/json"},
expects: [200, 400, 403]
)

Hosting::GcpApis.check_errors(response)
end

def allow_access_to_big_query_dataset(service_account_email, dataset)
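# datasets.patch replaces the whole access array, so fetch the current list and append to it rather than overwrite.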
connection = Excon.new("https://bigquery.googleapis.com", headers: @host[:headers])
response = connection.get(
path: "/bigquery/v2/projects/#{@project}/datasets/#{dataset}",
expects: [200, 400, 403]
)

Hosting::GcpApis.check_errors(response)
body = JSON.parse(response.body)
access = body["access"]
access << {
role: "roles/bigquery.metadataViewer",
userByEmail: service_account_email
}

response = connection.patch(
path: "/bigquery/v2/projects/#{@project}/datasets/#{dataset}",
body: JSON.dump({
access: access
}),
expects: [200, 400, 403]
)
Hosting::GcpApis.check_errors(response)
end

def allow_access_to_big_query_table(service_account_email, dataset, table)
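# Table-level IAM: read the current policy, append a dataEditor binding for the service account, write it back.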
connection = Excon.new("https://bigquery.googleapis.com", headers: @host[:headers])
response = connection.post(
path: "/bigquery/v2/projects/#{@project}/datasets/#{dataset}/tables/#{table}:getIamPolicy",
body: JSON.dump({}),
expects: [200, 400, 403]
)

Hosting::GcpApis.check_errors(response)

policy = JSON.parse(response.body)

policy["bindings"] ||= []
policy["bindings"] << {
role: "roles/bigquery.dataEditor",
members: ["serviceAccount:#{service_account_email}"]
}

response = connection.post(
path: "/bigquery/v2/projects/#{@project}/datasets/#{dataset}/tables/#{table}:setIamPolicy",
body: JSON.dump({policy: policy}),
expects: [200, 400, 403]
)

Hosting::GcpApis.check_errors(response)
end

def remove_big_query_table(dataset, table)
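# 404 is accepted alongside 204 so repeated destroys stay idempotent.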
connection = Excon.new("https://bigquery.googleapis.com", headers: @host[:headers])

connection.delete(
path: "/bigquery/v2/projects/#{@project}/datasets/#{dataset}/tables/#{table}",
expects: [204, 404]
)
end
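Taken together, these four helpers cover the whole lifecycle of a per-resource log table. A rough sketch of the intended call order, with placeholder service-account e-mail, dataset, and table names (illustrative, not values from this PR):

    # Sketch only; identifiers are hypothetical.
    api = Hosting::GcpApis.new
    sa = "lt-abc123@my-project.iam.gserviceaccount.com"
    schema = [{name: "log_time", type: "TIMESTAMP", mode: "NULLABLE"}]  # trimmed; full schema lives in lantern_resource.rb

    api.create_big_query_table("lantern_logs", "mydb_logs", schema)       # create the table
    api.allow_access_to_big_query_dataset(sa, "lantern_logs")             # metadata viewer on the dataset
    api.allow_access_to_big_query_table(sa, "lantern_logs", "mydb_logs")  # dataEditor on this one table
    api.remove_big_query_table("lantern_logs", "mydb_logs")               # teardown; 404 tolerated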

def create_image(name:, vm_name:, zone:, description: "", family: "lantern-ubuntu")
connection = Excon.new(@host[:connection_string], headers: @host[:headers])
body = {
22 changes: 22 additions & 0 deletions migrate/20240520_lantern_resource_sa.rb
@@ -0,0 +1,22 @@
# frozen_string_literal: true

Sequel.migration do
change do
alter_table(:lantern_resource) do
add_column :gcp_creds_b64, :text, collate: '"C"'
add_column :service_account_name, :text, collate: '"C"'
end

sql = <<SQL
UPDATE lantern_resource lr
SET gcp_creds_b64 = lt.gcp_creds_b64,
service_account_name = lt.service_account_name
FROM lantern_server ls JOIN lantern_timeline lt ON ls.timeline_access='push' AND ls.timeline_id=lt.id WHERE ls.resource_id = lr.id;
SQL
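# Backfill: each resource adopts the creds and service account already provisioned for its push (primary) timeline.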
run sql

alter_table(:lantern_timeline) do
drop_column :service_account_name
end
end
end
53 changes: 53 additions & 0 deletions model/lantern/lantern_resource.rb
@@ -27,12 +27,17 @@ class LanternResource < Sequel::Model
enc.column :superuser_password
enc.column :db_user_password
enc.column :repl_password
enc.column :gcp_creds_b64
end

def self.ubid_to_name(id)
id.to_s[0..7]
end

def big_query_table
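# one log table per resource, named after it, inside the shared dataset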
"#{name}_logs"
end

def hyper_tag_name(project)
"project/#{project.ubid}/location/#{location}/lantern/#{name}"
end
@@ -65,6 +70,54 @@ def dissociate_forks
}
end

def setup_service_account
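# Provisions the dedicated service account for this resource and stores its exported key;
# the same identity is reused for both WAL-G bucket access and BigQuery logging.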
api = Hosting::GcpApis.new
service_account = api.create_service_account("lt-#{ubid}", "Service Account for Lantern #{name}")
key = api.export_service_account_key(service_account["email"])
update(gcp_creds_b64: key, service_account_name: service_account["email"])
end

def allow_timeline_access_to_bucket
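# Hands the resource's creds to its timeline and scopes GCS access to the timeline's prefix.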
timeline.update(gcp_creds_b64: gcp_creds_b64)
api = Hosting::GcpApis.new
api.allow_bucket_usage_by_prefix(service_account_name, Config.lantern_backup_bucket, timeline.ubid)
end

def create_logging_table
api = Hosting::GcpApis.new
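# The schema essentially mirrors the columns of Postgres' csvlog output.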
schema = [
{name: "log_time", type: "TIMESTAMP", mode: "NULLABLE"},
{name: "user_name", type: "STRING", mode: "NULLABLE"},
{name: "database_name", type: "STRING", mode: "NULLABLE"},
{name: "process_id", type: "INTEGER", mode: "NULLABLE"},
{name: "connection_from", type: "STRING", mode: "NULLABLE"},
{name: "session_id", type: "STRING", mode: "NULLABLE"},
{name: "session_line_num", type: "INTEGER", mode: "NULLABLE"},
{name: "command_tag", type: "STRING", mode: "NULLABLE"},
{name: "session_start_time", type: "TIMESTAMP", mode: "NULLABLE"},
{name: "virtual_transaction_id", type: "STRING", mode: "NULLABLE"},
{name: "transaction_id", type: "INTEGER", mode: "NULLABLE"},
{name: "error_severity", type: "STRING", mode: "NULLABLE"},
{name: "sql_state_code", type: "STRING", mode: "NULLABLE"},
{name: "duration", type: "FLOAT", mode: "NULLABLE"},
{name: "message", type: "STRING", mode: "NULLABLE"},
{name: "detail", type: "STRING", mode: "NULLABLE"},
{name: "hint", type: "STRING", mode: "NULLABLE"},
{name: "internal_query", type: "STRING", mode: "NULLABLE"},
{name: "internal_query_pos", type: "INTEGER", mode: "NULLABLE"},
{name: "context", type: "STRING", mode: "NULLABLE"},
{name: "query", type: "STRING", mode: "NULLABLE"},
{name: "query_pos", type: "INTEGER", mode: "NULLABLE"},
{name: "location", type: "STRING", mode: "NULLABLE"},
{name: "application_name", type: "STRING", mode: "NULLABLE"}
]
api.create_big_query_table(Config.lantern_log_dataset, big_query_table, schema)
# Add metadata viewer access
api.allow_access_to_big_query_dataset(service_account_name, Config.lantern_log_dataset)
# Add access to only this table
api.allow_access_to_big_query_table(service_account_name, Config.lantern_log_dataset, big_query_table)
end

module HaType
NONE = "none"
ASYNC = "async"
4 changes: 3 additions & 1 deletion model/lantern/lantern_server.rb
@@ -124,7 +124,9 @@ def configure_hash
postgresql_recover_from_backup: backup_label,
postgresql_recovery_target_time: resource.restore_target || "",
gcp_creds_walg_b64: walg_config[:gcp_creds_b64],
- walg_gs_prefix: walg_config[:walg_gs_prefix]
+ walg_gs_prefix: walg_config[:walg_gs_prefix],
+ gcp_creds_big_query_b64: resource.gcp_creds_b64,
+ big_query_dataset: Config.lantern_log_dataset
})
end

1 change: 0 additions & 1 deletion model/lantern/lantern_timeline.rb
@@ -5,7 +5,6 @@
class LanternTimeline < Sequel::Model
one_to_one :strand, key: :id
many_to_one :parent, key: :parent_id, class: self
one_to_many :children, key: :parent_id, class: self
one_to_one :leader, class: LanternServer, key: :timeline_id, conditions: {timeline_access: "push"}

include ResourceMethods
18 changes: 14 additions & 4 deletions prog/lantern/lantern_resource_nexus.rb
@@ -115,11 +115,14 @@ def before_run
end

label def start
nap 5 unless representative_server.vm.strand.label == "wait"
- register_deadline(:wait, 10 * 60)
- # bud self.class, frame, :trigger_pg_current_xact_id_on_parent if lantern_resource.parent
+ lantern_resource.setup_service_account
+ lantern_resource.create_logging_table

+ if lantern_resource.parent_id.nil?
+   lantern_resource.allow_timeline_access_to_bucket
+ end

- # hop_wait_trigger_pg_current_xact_id_on_parent
+ register_deadline(:wait, 10 * 60)
hop_wait_servers
end

@@ -166,6 +169,13 @@ def before_run
end

lantern_resource.doctor&.incr_destroy

if lantern_resource.service_account_name
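# tear down the per-resource BigQuery table and service account before dissociating the resource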
api = Hosting::GcpApis.new
api.remove_big_query_table(Config.lantern_log_dataset, lantern_resource.big_query_table)
api.remove_service_account(lantern_resource.service_account_name)
end

lantern_resource.dissociate_with_project(lantern_resource.project)
lantern_resource.destroy

3 changes: 2 additions & 1 deletion prog/lantern/lantern_server_nexus.rb
@@ -67,7 +67,7 @@ def before_run
end

label def start
- nap 5 unless vm.strand.label == "wait" && lantern_server.timeline.strand.label != "start"
+ nap 5 unless vm.strand.label == "wait" && lantern_server.resource.strand.label != "start"

lantern_server.incr_initial_provisioning

@@ -191,6 +191,7 @@ def before_run
lantern_server.timeline_id = timeline_id
lantern_server.timeline_access = "push"
lantern_server.save_changes
lantern_server.resource.allow_timeline_access_to_bucket
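# a server taking over push access re-grants its resource's service account on the new timeline's bucket prefix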

lantern_version = lantern_server.run_query("SELECT extversion FROM pg_extension WHERE extname='lantern'")
extras_version = lantern_server.run_query("SELECT extversion FROM pg_extension WHERE extname='lantern_extras'")
14 changes: 3 additions & 11 deletions prog/lantern/lantern_timeline_nexus.rb
@@ -10,14 +10,15 @@ class Prog::Lantern::LanternTimelineNexus < Prog::Base

semaphore :destroy

- def self.assemble(parent_id: nil)
+ def self.assemble(gcp_creds_b64: nil, parent_id: nil)
if parent_id && (LanternTimeline[parent_id]).nil?
fail "No existing parent"
end

DB.transaction do
lantern_timeline = LanternTimeline.create_with_id(
- parent_id: parent_id
+ parent_id: parent_id,
+ gcp_creds_b64: gcp_creds_b64
)
Strand.create(prog: "Lantern::LanternTimelineNexus", label: "start") { _1.id = lantern_timeline.id }
end
@@ -32,11 +33,6 @@ def before_run
end

label def start
- api = Hosting::GcpApis.new
- service_account = api.create_service_account("lt-#{lantern_timeline.ubid}", "Service Account for Timeline #{lantern_timeline.ubid}")
- key = api.export_service_account_key(service_account["email"])
- api.allow_bucket_usage_by_prefix(service_account["email"], Config.lantern_backup_bucket, lantern_timeline.ubid)
- lantern_timeline.update(service_account_name: service_account["email"], gcp_creds_b64: key)
hop_wait_leader
end

@@ -92,9 +88,5 @@ def before_run
def destroy_blob_storage
# TODO
# Remove all backups
- if lantern_timeline.service_account_name
-   api = Hosting::GcpApis.new
-   api.remove_service_account(lantern_timeline.service_account_name)
- end
end
end
2 changes: 2 additions & 0 deletions rhizome/lantern/bin/configure
@@ -65,6 +65,8 @@ def setup_env
f.puts("POSTGRESQL_LOG_LINE_PREFIX=lantern-logline: app: %a user: %u time: %t proc_start: %s pid: %p linenumber: %l === ")
f.puts("POSTGRESQL_LOG_DURATION=true")
f.puts("POSTGRESQL_LOG_MIN_DURATION_STATEMENT=250ms")
f.puts("GOOGLE_APPLICATION_CREDENTIALS_BIGQUERY_B64=#{$configure_hash["gcp_creds_big_query_b64"]}")
f.puts("BIGQUERY_DATASET=#{$configure_hash["big_query_dataset"]}")

if $configure_hash["enable_auditlogs"]
f.puts("POSTGRESQL_PGAUDIT_LOG=READ,WRITE")