Merge remote-tracking branch 'origin' into add-gauge-subs
ayirr7 committed Apr 25, 2024
2 parents d2fc040 + 373c392 commit dfb335a
Showing 14 changed files with 214 additions and 20 deletions.
4 changes: 2 additions & 2 deletions .craft.yml
@@ -10,11 +10,11 @@ statusProvider:
targets:
- id: release
name: docker
source: us.gcr.io/sentryio/snuba
source: us-central1-docker.pkg.dev/sentryio/snuba/image
target: getsentry/snuba
- id: latest
name: docker
source: us.gcr.io/sentryio/snuba
source: us-central1-docker.pkg.dev/sentryio/snuba/image
target: getsentry/snuba
targetFormat: '{{{target}}}:latest'
- name: github
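Both release targets now publish from Google Artifact Registry (us-central1-docker.pkg.dev/sentryio/snuba/image) instead of the legacy Container Registry path (us.gcr.io/sentryio/snuba); the same substitution repeats in every GoCD pipeline and bash template below. As a hedged aside, not part of the commit, a quick pre-deploy sanity check that a revision exists at the new path could look like the following sketch; the script name and the REVISION argument are hypothetical, and it assumes an authenticated gcloud session with access to the sentryio project.

#!/usr/bin/env bash
# check-image.sh (hypothetical helper, not in this commit): verify that a Snuba
# image for a given git SHA is published in Artifact Registry before deploying.
set -euo pipefail

REVISION="${1:?usage: check-image.sh <git-sha>}"   # stand-in for GO_REVISION_SNUBA_REPO

# List tags in the Artifact Registry repository that match the revision.
gcloud artifacts docker images list \
  "us-central1-docker.pkg.dev/sentryio/snuba/image" \
  --include-tags \
  --filter="tags:${REVISION}"

# Or resolve the tag directly by pulling it.
docker pull "us-central1-docker.pkg.dev/sentryio/snuba/image:${REVISION}"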
2 changes: 1 addition & 1 deletion gocd/pipelines/snuba-sns-test.yaml
@@ -89,7 +89,7 @@ pipelines:
--container-name="spans-consumer" \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="service=snuba" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--type="cronjob" \
--container-name="cleanup" \
--container-name="optimize"
10 changes: 5 additions & 5 deletions gocd/pipelines/snuba-stable.yaml
@@ -46,7 +46,7 @@ pipelines:
/devinfra/scripts/checks/googlecloud/checkcloudbuild.py \
${GO_REVISION_SNUBA_REPO} \
sentryio \
"us.gcr.io/sentryio/snuba"
"us-central1-docker.pkg.dev/sentryio/snuba/image"
- script: |
deploy_sha=`snuba/scripts/fetch_service_refs.py --pipeline "deploy-snuba-s4s"` && \
snuba/scripts/check-migrations.py --to $deploy_sha --workdir snuba
@@ -71,7 +71,7 @@ pipelines:
/devinfra/scripts/k8s/k8stunnel \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="service=snuba,is_canary=true" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--container-name="api" \
--container-name="consumer" \
--container-name="errors-consumer" \
@@ -102,7 +102,7 @@ pipelines:
--container-name="transactions-subscriptions-scheduler" \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="service=snuba,is_canary=true" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--type="cronjob" \
--container-name="cleanup" \
--container-name="optimize" \
@@ -127,7 +127,7 @@ pipelines:
/devinfra/scripts/k8s/k8stunnel \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="service=snuba" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--container-name="api" \
--container-name="consumer" \
--container-name="errors-consumer" \
@@ -158,7 +158,7 @@ pipelines:
--container-name="transactions-subscriptions-scheduler" \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="service=snuba" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--type="cronjob" \
--container-name="cleanup" \
--container-name="optimize" \
2 changes: 1 addition & 1 deletion gocd/templates/bash/check-cloud-build.sh
@@ -3,4 +3,4 @@
/devinfra/scripts/checks/googlecloud/checkcloudbuild.py \
${GO_REVISION_SNUBA_REPO} \
sentryio \
"us.gcr.io/sentryio/snuba"
"us-central1-docker.pkg.dev/sentryio/snuba/image"
4 changes: 2 additions & 2 deletions gocd/templates/bash/deploy-st.sh
@@ -6,12 +6,12 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"

/devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="${LABEL_SELECTOR}" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--container-name="snuba" \
--container-name="snuba-admin"

/devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="${LABEL_SELECTOR}" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--type="cronjob" \
--container-name="cleanup"
6 changes: 3 additions & 3 deletions gocd/templates/bash/deploy.sh
@@ -5,7 +5,7 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
/devinfra/scripts/k8s/k8stunnel \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="${LABEL_SELECTOR}" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--container-name="api" \
--container-name="consumer" \
--container-name="errors-consumer" \
@@ -49,13 +49,13 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
--container-name="metrics-summaries-consumer" \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="${LABEL_SELECTOR}" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--type="cronjob" \
--container-name="cleanup" \
--container-name="optimize" \
--container-name="cardinality-report" \
&& /devinfra/scripts/k8s/k8s-deploy.py \
--label-selector="${LABEL_SELECTOR}" \
--image="us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
--image="us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
--type="statefulset" \
--container-name="spans-exp-static-on"
2 changes: 1 addition & 1 deletion gocd/templates/bash/migrate-reverse.sh
@@ -7,6 +7,6 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
--label-selector="service=${SNUBA_SERVICE_NAME}" \
--container-name="${SNUBA_SERVICE_NAME}" \
"snuba-migrate-reverse" \
"us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
"us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
-- \
snuba migrations reverse-in-progress
4 changes: 2 additions & 2 deletions gocd/templates/bash/migrate-st.sh
@@ -13,14 +13,14 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
--label-selector="service=${SNUBA_SERVICE_NAME}" \
--container-name="${SNUBA_SERVICE_NAME}" \
"snuba-bootstrap" \
"us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
"us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
-- \
snuba bootstrap --force --no-migrate

/devinfra/scripts/k8s/k8s-spawn-job.py \
--label-selector="service=${SNUBA_SERVICE_NAME}" \
--container-name="${SNUBA_SERVICE_NAME}" \
"snuba-migrate" \
"us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
"us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
-- \
snuba migrations migrate --check-dangerous
2 changes: 1 addition & 1 deletion gocd/templates/bash/migrate.sh
@@ -7,6 +7,6 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
--label-selector="service=${SNUBA_SERVICE_NAME}" \
--container-name="${SNUBA_SERVICE_NAME}" \
"snuba-migrate" \
"us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
"us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
-- \
snuba migrations migrate --check-dangerous -r complete -r partial
2 changes: 1 addition & 1 deletion gocd/templates/bash/s4s-clickhouse-queries.sh
@@ -26,6 +26,6 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
--container-name="${SNUBA_COMPONENT_NAME}" \
--try-deployments-and-statefulsets \
"snuba-query-${SNUBA_CMD_TYPE}" \
"us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
"us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
-- \
snuba $SNUBA_CMD
2 changes: 1 addition & 1 deletion gocd/templates/bash/s4s-replay-queries.sh
@@ -7,6 +7,6 @@ eval $(/devinfra/scripts/regions/project_env_vars.py --region="${SENTRY_REGION}"
--label-selector="service=snuba,component=${SNUBA_COMPONENT_NAME}" \
--container-name="${SNUBA_COMPONENT_NAME}" \
"snuba-query-replayer" \
"us.gcr.io/sentryio/snuba:${GO_REVISION_SNUBA_REPO}" \
"us-central1-docker.pkg.dev/sentryio/snuba/image:${GO_REVISION_SNUBA_REPO}" \
-- \
snuba query-replayer "${args[@]}"
1 change: 1 addition & 0 deletions snuba/migrations/group_loader.py
@@ -338,6 +338,7 @@ def get_migrations(self) -> Sequence[str]:
"0042_rename_counters_meta_tables",
"0043_sets_meta_tables",
"0044_gauges_meta_tables",
"0045_distributions_meta_tables",
]


@@ -0,0 +1,192 @@
from typing import Sequence

from snuba.clickhouse.columns import AggregateFunction, Column, DateTime, String, UInt
from snuba.clusters.storage_sets import StorageSetKey
from snuba.migrations import migration, operations, table_engines
from snuba.migrations.columns import MigrationModifiers as Modifiers
from snuba.migrations.operations import OperationTarget
from snuba.utils.schemas import Float


class Migration(migration.ClickhouseNodeMigration):
blocking = False
granularity = "8192"
meta_view_name = "generic_metric_distributions_meta_mv"
meta_local_table_name = "generic_metric_distributions_meta_local"
meta_dist_table_name = "generic_metric_distributions_meta_dist"
meta_table_columns: Sequence[Column[Modifiers]] = [
Column("org_id", UInt(64)),
Column("project_id", UInt(64)),
Column("use_case_id", String(Modifiers(low_cardinality=True))),
Column("metric_id", UInt(64)),
Column("tag_key", UInt(64)),
Column("timestamp", DateTime(modifiers=Modifiers(codecs=["DoubleDelta"]))),
Column("retention_days", UInt(16)),
Column("count", AggregateFunction("sum", [Float(64)])),
]

tag_value_view_name = "generic_metric_distributions_meta_tag_values_mv"
tag_value_local_table_name = "generic_metric_distributions_meta_tag_values_local"
tag_value_dist_table_name = "generic_metric_distributions_meta_tag_values_dist"
tag_value_table_columns: Sequence[Column[Modifiers]] = [
Column("project_id", UInt(64)),
Column("metric_id", UInt(64)),
Column("tag_key", UInt(64)),
Column("tag_value", String()),
Column("timestamp", DateTime(modifiers=Modifiers(codecs=["DoubleDelta"]))),
Column("retention_days", UInt(16)),
Column("count", AggregateFunction("sum", [Float(64)])),
]

storage_set_key = StorageSetKey.GENERIC_METRICS_DISTRIBUTIONS

def forwards_ops(self) -> Sequence[operations.SqlOperation]:
return [
operations.CreateTable(
storage_set=self.storage_set_key,
table_name=self.meta_local_table_name,
engine=table_engines.AggregatingMergeTree(
storage_set=self.storage_set_key,
order_by="(org_id, project_id, use_case_id, metric_id, tag_key, timestamp)",
primary_key="(org_id, project_id, use_case_id, metric_id, tag_key, timestamp)",
partition_by="toMonday(timestamp)",
settings={
"index_granularity": self.granularity,
# Since the partitions contain multiple retention periods, need to ensure
# that rows within partitions are dropped
"ttl_only_drop_parts": 0,
},
ttl="timestamp + toIntervalDay(retention_days)",
),
columns=self.meta_table_columns,
target=OperationTarget.LOCAL,
),
operations.CreateTable(
storage_set=self.storage_set_key,
table_name=self.meta_dist_table_name,
engine=table_engines.Distributed(
local_table_name=self.meta_local_table_name, sharding_key=None
),
columns=self.meta_table_columns,
target=OperationTarget.DISTRIBUTED,
),
operations.CreateMaterializedView(
storage_set=self.storage_set_key,
view_name=self.meta_view_name,
columns=self.meta_table_columns,
destination_table_name=self.meta_local_table_name,
target=OperationTarget.LOCAL,
query="""
SELECT
org_id,
project_id,
use_case_id,
metric_id,
tag_key,
toMonday(timestamp) as timestamp,
retention_days,
sumState(count_value) as count
FROM generic_metric_distributions_raw_local
ARRAY JOIN tags.key AS tag_key
WHERE record_meta = 1
GROUP BY
org_id,
project_id,
use_case_id,
metric_id,
tag_key,
timestamp,
retention_days
""",
),
operations.CreateTable(
storage_set=self.storage_set_key,
table_name=self.tag_value_local_table_name,
engine=table_engines.AggregatingMergeTree(
storage_set=self.storage_set_key,
order_by="(project_id, metric_id, tag_key, tag_value, timestamp)",
primary_key="(project_id, metric_id, tag_key, tag_value, timestamp)",
partition_by="toMonday(timestamp)",
settings={
"index_granularity": self.granularity,
# Since the partitions contain multiple retention periods, need to ensure
# that rows within partitions are dropped
"ttl_only_drop_parts": 0,
},
ttl="timestamp + toIntervalDay(retention_days)",
),
columns=self.tag_value_table_columns,
target=OperationTarget.LOCAL,
),
operations.CreateTable(
storage_set=self.storage_set_key,
table_name=self.tag_value_dist_table_name,
engine=table_engines.Distributed(
local_table_name=self.tag_value_local_table_name, sharding_key=None
),
columns=self.tag_value_table_columns,
target=OperationTarget.DISTRIBUTED,
),
operations.CreateMaterializedView(
storage_set=self.storage_set_key,
view_name=self.tag_value_view_name,
columns=self.tag_value_table_columns,
destination_table_name=self.tag_value_local_table_name,
target=OperationTarget.LOCAL,
query="""
SELECT
project_id,
metric_id,
tag_key,
tag_value,
toMonday(timestamp) as timestamp,
retention_days,
sumState(count_value) as count
FROM generic_metric_distributions_raw_local
ARRAY JOIN
tags.key AS tag_key, tags.raw_value AS tag_value
WHERE record_meta = 1
GROUP BY
project_id,
metric_id,
tag_key,
tag_value,
timestamp,
retention_days
""",
),
]

def backwards_ops(self) -> Sequence[operations.SqlOperation]:
return [
operations.DropTable(
storage_set=self.storage_set_key,
table_name=self.tag_value_view_name,
target=OperationTarget.LOCAL,
),
operations.DropTable(
storage_set=self.storage_set_key,
table_name=self.tag_value_dist_table_name,
target=OperationTarget.DISTRIBUTED,
),
operations.DropTable(
storage_set=self.storage_set_key,
table_name=self.tag_value_local_table_name,
target=OperationTarget.LOCAL,
),
operations.DropTable(
storage_set=self.storage_set_key,
table_name=self.meta_view_name,
target=OperationTarget.LOCAL,
),
operations.DropTable(
storage_set=self.storage_set_key,
table_name=self.meta_dist_table_name,
target=OperationTarget.DISTRIBUTED,
),
operations.DropTable(
storage_set=self.storage_set_key,
table_name=self.meta_local_table_name,
target=OperationTarget.LOCAL,
),
]
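The new 0045_distributions_meta_tables migration follows the same pattern as the preceding sets and gauges meta-table migrations (0043, 0044): it creates aggregating local and distributed meta and tag-value tables plus the materialized views that populate them from generic_metric_distributions_raw_local, and the backwards operations drop them in reverse order. As a hedged sketch, not part of the commit, exercising it against a development ClickHouse could reuse exactly the snuba CLI invocations that the deploy templates above run; it assumes a local ClickHouse reachable with default development settings.

set -euo pipefail

# Apply pending migrations (including 0045) with the same safety flag the
# production migrate jobs pass.
snuba migrations migrate --check-dangerous

# If an apply is interrupted partway through, the rollback path used by the
# deploy tooling is:
snuba migrations reverse-in-progress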
1 change: 1 addition & 0 deletions snuba/state/cache/redis/backend.py
@@ -226,6 +226,7 @@ def build_notify_queue_key(task_ident: str) -> str:
task_ident = result[1].decode("utf-8")
task_timeout_remaining = int(result[2])
effective_timeout = min(task_timeout_remaining, timeout)
metrics.increment("task_waiting", tags=metric_tags)
logger.debug(
"Waiting for task result (%r) for up to %s seconds...",
task_ident,