ref: Record cogs is a setting not runtime config (#5423)
It should be a boolean (either on or off) and should be part of settings, not runtime config (which requires manual configuration via the UI on each tenant).

Both the Rust and Python consumers record COGS based on the value from settings.

Depends on getsentry/ops#9061
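
For illustration, a minimal sketch of how a deployment could now turn on COGS recording (the override module named in the comment is hypothetical; any settings module Snuba loads at startup would work the same way):

    # hypothetical settings override, e.g. a deployment-specific Snuba settings module
    # Both the Python consumers and the Rust consumer (via EnvConfig.record_cogs)
    # read this single boolean at startup; no per-tenant runtime config is needed.
    RECORD_COGS = True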
lynnagara committed Jan 19, 2024
1 parent 8d35a12 commit c265200
Showing 6 changed files with 16 additions and 8 deletions.
rust_snuba/src/config.rs (2 additions, 0 deletions)

@@ -105,6 +105,7 @@ pub struct EnvConfig {
 pub default_retention_days: u16,
 pub lower_retention_days: u16,
 pub valid_retention_days: HashSet<u16>,
+pub record_cogs: bool,
 }

 impl Default for EnvConfig {
@@ -116,6 +117,7 @@ impl Default for EnvConfig {
 default_retention_days: 90,
 lower_retention_days: 30,
 valid_retention_days: [30, 90].iter().cloned().collect(),
+record_cogs: false,
 }
 }
 }
rust_snuba/src/factory.rs (3 additions, 3 deletions)

@@ -131,10 +131,10 @@ impl ProcessingStrategyFactory<KafkaPayload> for ConsumerStrategyFactory {

 let cogs_label = get_cogs_label(&self.storage_config.message_processor.python_class_name);

-// Produce cogs if generic metrics and we are not skipping writes
+// Produce cogs if generic metrics AND we are not skipping writes AND record_cogs is true
 let next_step: Box<dyn ProcessingStrategy<BytesInsertBatch>> =
-match (self.skip_write, cogs_label) {
-(false, Some(resource_id)) => Box::new(RecordCogs::new(
+match (self.skip_write, self.env_config.record_cogs, cogs_label) {
+(false, true, Some(resource_id)) => Box::new(RecordCogs::new(
 next_step,
 resource_id,
 self.accountant_topic_config.broker_config.clone(),
snuba/consumers/consumer_config.py (3 additions, 0 deletions)

@@ -50,6 +50,7 @@ class EnvConfig:
 default_retention_days: int
 lower_retention_days: int
 valid_retention_days: list[int]
+record_cogs: bool


 @dataclass(frozen=True)
@@ -124,13 +125,15 @@ def _resolve_env_config() -> EnvConfig:
 default_retention_days = settings.DEFAULT_RETENTION_DAYS
 lower_retention_days = settings.LOWER_RETENTION_DAYS
 valid_retention_days = list(settings.VALID_RETENTION_DAYS)
+record_cogs = settings.RECORD_COGS
 return EnvConfig(
 sentry_dsn=sentry_dsn,
 dogstatsd_host=dogstatsd_host,
 dogstatsd_port=dogstatsd_port,
 default_retention_days=default_retention_days,
 lower_retention_days=lower_retention_days,
 valid_retention_days=valid_retention_days,
+record_cogs=record_cogs,
 )

snuba/datasets/processors/generic_metrics_processor.py (2 additions, 3 deletions)

@@ -3,7 +3,6 @@
 import zlib
 from abc import ABC, abstractmethod, abstractproperty
 from datetime import datetime, timezone
-from random import random
 from typing import (
 Any,
 Iterable,
@@ -18,6 +17,7 @@
 from sentry_kafka_schemas.schema_types.snuba_generic_metrics_v1 import GenericMetric
 from usageaccountant import UsageUnit

+from snuba import settings
 from snuba.cogs.accountant import record_cogs
 from snuba.consumers.types import KafkaMessageMetadata
 from snuba.datasets.events_format import EventTooOld, enforce_retention
@@ -37,7 +37,6 @@
 )
 from snuba.datasets.processors import DatasetMessageProcessor
 from snuba.processor import InsertBatch, ProcessedMessage, _ensure_valid_date
-from snuba.state import get_config

 logger = logging.getLogger(__name__)

@@ -167,7 +166,7 @@ def process_message(
 )

 def __record_cogs(self, message: GenericMetric) -> None:
-if random() < (get_config("gen_metrics_processor_cogs_probability") or 0):
+if settings.RECORD_COGS:
 record_cogs(
 resource_id=self._resource_id,
 app_feature=f"genericmetrics_{message['use_case_id']}",
snuba/settings/__init__.py (3 additions, 0 deletions)

@@ -188,6 +188,9 @@ class RedisClusters(TypedDict):
 # Query Recording Options
 RECORD_QUERIES = False

+# Record COGS
+RECORD_COGS = False
+
 # Runtime Config Options
 CONFIG_MEMOIZE_TIMEOUT = 10
 CONFIG_STATE: Mapping[str, Optional[Any]] = {}
tests/datasets/test_generic_metrics_processor.py (3 additions, 2 deletions)

@@ -6,7 +6,7 @@
 import pytest
 from usageaccountant import UsageUnit

-from snuba import state
+from snuba import settings
 from snuba.datasets.metrics_messages import InputType
 from snuba.datasets.processors.generic_metrics_processor import (
 GenericDistributionsMetricsProcessor,
@@ -154,7 +154,8 @@ def test_record_cogs(dis_processor: GenericDistributionsMetricsProcessor) -> None:
 "retention_days": 22,
 "mapping_meta": MAPPING_META_COMMON,
 }
-state.set_config("gen_metrics_processor_cogs_probability", 1.0)
+settings.RECORD_COGS = True
+
 with mock.patch(
 "snuba.datasets.processors.generic_metrics_processor.record_cogs"
 ) as record_cogs:
