Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(alerts): on demand metric time series support #52867

Merged
merged 11 commits into from
Jul 18, 2023
40 changes: 23 additions & 17 deletions src/sentry/search/events/builder/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
class MetricsQueryBuilder(QueryBuilder):
requires_organization_condition = True
is_alerts_query = False
is_timeseries_query = False
organization_column: str = "organization_id"

def __init__(
Expand Down Expand Up @@ -98,7 +99,7 @@ def _resolve_on_demand_spec(
self, dataset: Optional[Dataset], selected_cols: List[Optional[str]], query: str
) -> Optional[OndemandMetricSpec]:
field = selected_cols[0] if selected_cols else None
if not self.is_alerts_query or not field:
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we now need this to execute for TimeseriesQueryBuilder as well

if not field:
return None

if not is_on_demand_query(dataset, field, query):
Expand All @@ -111,30 +112,32 @@ def _resolve_on_demand_spec(
return None

def _get_on_demand_metrics_query(self) -> Optional[MetricsQuery]:
if not self.is_performance or not self.is_alerts_query:
return None

spec = self._on_demand_spec

# TimeseriesQueryBuilder specific parameters
if self.is_timeseries_query:
limit = None
alias = "count"
else:
limit = self.limit
alias = spec.mri
obostjancic marked this conversation as resolved.
Show resolved Hide resolved

return MetricsQuery(
select=[MetricField(spec.op, spec.mri, alias=spec.mri)],
select=[MetricField(spec.op, spec.mri, alias=alias)],
where=[
Condition(
lhs=Column(QUERY_HASH_KEY),
op=Op.EQ,
rhs=spec.query_hash(),
),
],
# TODO(ogi): groupby and orderby
limit=self.limit,
limit=limit,
offset=self.offset,
granularity=self.resolve_granularity(),
is_alerts_query=self.is_alerts_query,
org_id=self.params.organization.id,
project_ids=[p.id for p in self.params.projects],
# We do not need the series here, as later, we only extract the totals and assign it to the
# request.query
include_series=False,
include_series=True,
start=self.params.start,
end=self.params.end,
)
Expand Down Expand Up @@ -768,7 +771,6 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
metric_query = transform_mqb_query_to_metrics_query(
self.get_metrics_layer_snql_query().query, self.is_alerts_query
)
# metric_query.where = metric_query_ondemand.where
with sentry_sdk.start_span(op="metric_layer", description="run_query"):
metrics_data = get_series(
projects=self.params.projects,
Expand All @@ -792,7 +794,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
data.update(group["totals"])
metric_layer_result["data"].append(data)
for meta in metric_layer_result["meta"]:
if data[meta["name"]] is None:
if data.get(meta["name"]) is None:
obostjancic marked this conversation as resolved.
Show resolved Hide resolved
data[meta["name"]] = self.get_default_value(meta["type"])

return metric_layer_result
Expand Down Expand Up @@ -1038,6 +1040,7 @@ def resolve_split_granularity(self) -> Tuple[List[Condition], Optional[Granulari

class TimeseriesMetricQueryBuilder(MetricsQueryBuilder):
time_alias = "time"
is_timeseries_query = True

def __init__(
self,
Expand Down Expand Up @@ -1180,7 +1183,7 @@ def get_snql_query(self) -> List[Request]:
return queries

def run_query(self, referrer: str, use_cache: bool = False) -> Any:
if self.use_metrics_layer:
if self.use_metrics_layer or self._on_demand_spec:
from sentry.snuba.metrics.datasource import get_series
from sentry.snuba.metrics.mqb_query_transformer import (
transform_mqb_query_to_metrics_query,
Expand All @@ -1189,13 +1192,16 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
snuba_query = self.get_snql_query()[0].query
try:
with sentry_sdk.start_span(op="metric_layer", description="transform_query"):
metric_query = transform_mqb_query_to_metrics_query(
snuba_query, self.is_alerts_query
)
if self._on_demand_spec:
metrics_query = self._get_on_demand_metrics_query()
else:
metrics_query = transform_mqb_query_to_metrics_query(
snuba_query, self.is_alerts_query
)
with sentry_sdk.start_span(op="metric_layer", description="run_query"):
metrics_data = get_series(
projects=self.params.projects,
metrics_query=metric_query,
metrics_query=metrics_query,
use_case_id=UseCaseID.TRANSACTIONS
if self.is_performance
else UseCaseID.SESSIONS,
Expand Down
3 changes: 2 additions & 1 deletion src/sentry/snuba/metrics/extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@
"browser.name",
"os.name",
"geo.country_code",
# event.type is accepted as a valid search field but it will not be converted to a condition for metric extraction
"event.type",
]

Expand Down Expand Up @@ -306,7 +307,7 @@ def __init__(self, field: str, query: str):

# On-demand metrics are implicitly transaction metrics. Remove the
# filter from the query since it can't be translated to a RuleCondition.
self._query = re.sub(r"event\.type:transaction\s*", "", query)
self._query = re.sub(r"event\.type:transaction\s*", "", query).strip()
self._init_aggregate(field)

def _init_aggregate(self, aggregate: str) -> None:
Expand Down
41 changes: 40 additions & 1 deletion tests/sentry/search/events/builder/test_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -1697,7 +1697,7 @@ def test_transaction_in_filter(self):
def test_project_filter(self):
query = TimeseriesMetricQueryBuilder(
self.params,
dataset=Dataset.PerformanceMetrics,
dataset=Dataset.Metrics,
interval=900,
query=f"project:{self.project.slug}",
selected_columns=["p95(transaction.duration)"],
Expand Down Expand Up @@ -1953,6 +1953,45 @@ def test_no_error_if_aggregates_disallowed_but_no_aggregates_included(self):
allow_metric_aggregates=False,
)

def test_on_demand_metrics(self):
query = TimeseriesMetricQueryBuilder(
self.params,
dataset=Dataset.PerformanceMetrics,
interval=900,
query="transaction.duration:>0",
selected_columns=["count()"],
)
result = query.run_query("test_query")
assert result["data"][:5] == [
{
"time": self.start.isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=1)).isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=2)).isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=3)).isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=4)).isoformat(),
"count": 0,
},
]
self.assertCountEqual(
result["meta"],
[
{"name": "time", "type": "DateTime('Universal')"},
{"name": "count", "type": "Float64"},
],
)


class HistogramMetricQueryBuilderTest(MetricBuilderBaseTest):
def test_histogram_columns_set_on_builder(self):
Expand Down
1 change: 1 addition & 0 deletions tests/sentry/snuba/test_extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ def test_is_on_demand_query_invalid_query():
assert is_on_demand_query(dataset, "count()", "AND") is False
assert is_on_demand_query(dataset, "count()", "(AND transaction.duration:>=1") is False
assert is_on_demand_query(dataset, "count()", "transaction.duration:>=abc") is False
assert is_on_demand_query(dataset, "count_if(}", "") is False


def test_is_on_demand_query_true():
Expand Down
Loading