diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py index c287561d2d0a8b..4fa03ff8672c30 100644 --- a/src/sentry/search/events/builder/metrics.py +++ b/src/sentry/search/events/builder/metrics.py @@ -50,6 +50,7 @@ class MetricsQueryBuilder(QueryBuilder): requires_organization_condition = True is_alerts_query = False + is_timeseries_query = False organization_column: str = "organization_id" def __init__( @@ -98,7 +99,7 @@ def _resolve_on_demand_spec( self, dataset: Optional[Dataset], selected_cols: List[Optional[str]], query: str ) -> Optional[OndemandMetricSpec]: field = selected_cols[0] if selected_cols else None - if not self.is_alerts_query or not field: + if not field: return None if not is_on_demand_query(dataset, field, query): @@ -111,13 +112,18 @@ def _resolve_on_demand_spec( return None def _get_on_demand_metrics_query(self) -> Optional[MetricsQuery]: - if not self.is_performance or not self.is_alerts_query: - return None - spec = self._on_demand_spec + # TimeseriesQueryBuilder specific parameters + if self.is_timeseries_query: + limit = None + alias = "count" + else: + limit = self.limit + alias = spec.mri + return MetricsQuery( - select=[MetricField(spec.op, spec.mri, alias=spec.mri)], + select=[MetricField(spec.op, spec.mri, alias=alias)], where=[ Condition( lhs=Column(QUERY_HASH_KEY), @@ -125,16 +131,13 @@ def _get_on_demand_metrics_query(self) -> Optional[MetricsQuery]: rhs=spec.query_hash(), ), ], - # TODO(ogi): groupby and orderby - limit=self.limit, + limit=limit, offset=self.offset, granularity=self.resolve_granularity(), is_alerts_query=self.is_alerts_query, org_id=self.params.organization.id, project_ids=[p.id for p in self.params.projects], - # We do not need the series here, as later, we only extract the totals and assign it to the - # request.query - include_series=False, + include_series=True, start=self.params.start, end=self.params.end, ) @@ -768,7 +771,6 @@ def run_query(self, referrer: 
str, use_cache: bool = False) -> Any: metric_query = transform_mqb_query_to_metrics_query( self.get_metrics_layer_snql_query().query, self.is_alerts_query ) - # metric_query.where = metric_query_ondemand.where with sentry_sdk.start_span(op="metric_layer", description="run_query"): metrics_data = get_series( projects=self.params.projects, @@ -792,7 +794,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: data.update(group["totals"]) metric_layer_result["data"].append(data) for meta in metric_layer_result["meta"]: - if data[meta["name"]] is None: + if data.get(meta["name"]) is None: data[meta["name"]] = self.get_default_value(meta["type"]) return metric_layer_result @@ -1038,6 +1040,7 @@ def resolve_split_granularity(self) -> Tuple[List[Condition], Optional[Granulari class TimeseriesMetricQueryBuilder(MetricsQueryBuilder): time_alias = "time" + is_timeseries_query = True def __init__( self, @@ -1180,7 +1183,7 @@ def get_snql_query(self) -> List[Request]: return queries def run_query(self, referrer: str, use_cache: bool = False) -> Any: - if self.use_metrics_layer: + if self.use_metrics_layer or self._on_demand_spec: from sentry.snuba.metrics.datasource import get_series from sentry.snuba.metrics.mqb_query_transformer import ( transform_mqb_query_to_metrics_query, @@ -1189,13 +1192,16 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: snuba_query = self.get_snql_query()[0].query try: with sentry_sdk.start_span(op="metric_layer", description="transform_query"): - metric_query = transform_mqb_query_to_metrics_query( - snuba_query, self.is_alerts_query - ) + if self._on_demand_spec: + metrics_query = self._get_on_demand_metrics_query() + else: + metrics_query = transform_mqb_query_to_metrics_query( + snuba_query, self.is_alerts_query + ) with sentry_sdk.start_span(op="metric_layer", description="run_query"): metrics_data = get_series( projects=self.params.projects, - metrics_query=metric_query, + metrics_query=metrics_query, 
use_case_id=UseCaseID.TRANSACTIONS if self.is_performance else UseCaseID.SESSIONS, diff --git a/src/sentry/snuba/metrics/extraction.py b/src/sentry/snuba/metrics/extraction.py index c747391311d3ec..eab5be778f6b1f 100644 --- a/src/sentry/snuba/metrics/extraction.py +++ b/src/sentry/snuba/metrics/extraction.py @@ -135,6 +135,7 @@ "browser.name", "os.name", "geo.country_code", + # event.type is accepted as a valid search field but it will not be converted to a condition for metric extraction "event.type", ] @@ -306,7 +307,7 @@ def __init__(self, field: str, query: str): # On-demand metrics are implicitly transaction metrics. Remove the # filter from the query since it can't be translated to a RuleCondition. - self._query = re.sub(r"event\.type:transaction\s*", "", query) + self._query = re.sub(r"event\.type:transaction\s*", "", query).strip() self._init_aggregate(field) def _init_aggregate(self, aggregate: str) -> None: diff --git a/tests/sentry/search/events/builder/test_metrics.py b/tests/sentry/search/events/builder/test_metrics.py index a2a9593a9b50b2..af22e19c6a96a9 100644 --- a/tests/sentry/search/events/builder/test_metrics.py +++ b/tests/sentry/search/events/builder/test_metrics.py @@ -1697,7 +1697,7 @@ def test_transaction_in_filter(self): def test_project_filter(self): query = TimeseriesMetricQueryBuilder( self.params, - dataset=Dataset.PerformanceMetrics, + dataset=Dataset.Metrics, interval=900, query=f"project:{self.project.slug}", selected_columns=["p95(transaction.duration)"], @@ -1953,6 +1953,45 @@ def test_no_error_if_aggregates_disallowed_but_no_aggregates_included(self): + def test_on_demand_metrics(self): + query = TimeseriesMetricQueryBuilder( + self.params, + dataset=Dataset.PerformanceMetrics, + interval=900, + query="transaction.duration:>0", + selected_columns=["count()"], + ) + result = query.run_query("test_query") + assert result["data"][:5] == [ + { + "time": self.start.isoformat(), + "count": 0, + }, + { + 
"time": (self.start + datetime.timedelta(hours=1)).isoformat(), + "count": 0, + }, + { + "time": (self.start + datetime.timedelta(hours=2)).isoformat(), + "count": 0, + }, + { + "time": (self.start + datetime.timedelta(hours=3)).isoformat(), + "count": 0, + }, + { + "time": (self.start + datetime.timedelta(hours=4)).isoformat(), + "count": 0, + }, + ] + self.assertCountEqual( + result["meta"], + [ + {"name": "time", "type": "DateTime('Universal')"}, + {"name": "count", "type": "Float64"}, + ], + ) + class HistogramMetricQueryBuilderTest(MetricBuilderBaseTest): def test_histogram_columns_set_on_builder(self): diff --git a/tests/sentry/snuba/test_extraction.py b/tests/sentry/snuba/test_extraction.py index 917e4e6a245803..675dfbf56e111f 100644 --- a/tests/sentry/snuba/test_extraction.py +++ b/tests/sentry/snuba/test_extraction.py @@ -19,6 +19,7 @@ def test_is_on_demand_query_invalid_query(): assert is_on_demand_query(dataset, "count()", "AND") is False assert is_on_demand_query(dataset, "count()", "(AND transaction.duration:>=1") is False assert is_on_demand_query(dataset, "count()", "transaction.duration:>=abc") is False + assert is_on_demand_query(dataset, "count_if(}", "") is False def test_is_on_demand_query_true():