Skip to content

Commit

Permalink
Revert "feat(alerts): on demand metric time series support (#52867)"
Browse files Browse the repository at this point in the history
This reverts commit f95d541.

Co-authored-by: obostjancic <86684834+obostjancic@users.noreply.github.com>
  • Loading branch information
getsentry-bot and obostjancic committed Jul 18, 2023
1 parent 99b7d19 commit 4856ab5
Show file tree
Hide file tree
Showing 4 changed files with 19 additions and 66 deletions.
40 changes: 17 additions & 23 deletions src/sentry/search/events/builder/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@
class MetricsQueryBuilder(QueryBuilder):
requires_organization_condition = True
is_alerts_query = False
is_timeseries_query = False
organization_column: str = "organization_id"

def __init__(
Expand Down Expand Up @@ -99,7 +98,7 @@ def _resolve_on_demand_spec(
self, dataset: Optional[Dataset], selected_cols: List[Optional[str]], query: str
) -> Optional[OndemandMetricSpec]:
field = selected_cols[0] if selected_cols else None
if not field:
if not self.is_alerts_query or not field:
return None

if not is_on_demand_query(dataset, field, query):
Expand All @@ -112,32 +111,30 @@ def _resolve_on_demand_spec(
return None

def _get_on_demand_metrics_query(self) -> Optional[MetricsQuery]:
spec = self._on_demand_spec
if not self.is_performance or not self.is_alerts_query:
return None

# TimeseriesQueryBuilder specific parameters
if self.is_timeseries_query:
limit = None
alias = "count"
else:
limit = self.limit
alias = spec.mri
spec = self._on_demand_spec

return MetricsQuery(
select=[MetricField(spec.op, spec.mri, alias=alias)],
select=[MetricField(spec.op, spec.mri, alias=spec.mri)],
where=[
Condition(
lhs=Column(QUERY_HASH_KEY),
op=Op.EQ,
rhs=spec.query_hash(),
),
],
limit=limit,
# TODO(ogi): groupby and orderby
limit=self.limit,
offset=self.offset,
granularity=self.resolve_granularity(),
is_alerts_query=self.is_alerts_query,
org_id=self.params.organization.id,
project_ids=[p.id for p in self.params.projects],
include_series=True,
# We do not need the series here, as later, we only extract the totals and assign it to the
# request.query
include_series=False,
start=self.params.start,
end=self.params.end,
)
Expand Down Expand Up @@ -771,6 +768,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
metric_query = transform_mqb_query_to_metrics_query(
self.get_metrics_layer_snql_query().query, self.is_alerts_query
)
# metric_query.where = metric_query_ondemand.where
with sentry_sdk.start_span(op="metric_layer", description="run_query"):
metrics_data = get_series(
projects=self.params.projects,
Expand All @@ -794,7 +792,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
data.update(group["totals"])
metric_layer_result["data"].append(data)
for meta in metric_layer_result["meta"]:
if data.get(meta["name"]) is None:
if data[meta["name"]] is None:
data[meta["name"]] = self.get_default_value(meta["type"])

return metric_layer_result
Expand Down Expand Up @@ -1040,7 +1038,6 @@ def resolve_split_granularity(self) -> Tuple[List[Condition], Optional[Granulari

class TimeseriesMetricQueryBuilder(MetricsQueryBuilder):
time_alias = "time"
is_timeseries_query = True

def __init__(
self,
Expand Down Expand Up @@ -1183,7 +1180,7 @@ def get_snql_query(self) -> List[Request]:
return queries

def run_query(self, referrer: str, use_cache: bool = False) -> Any:
if self.use_metrics_layer or self._on_demand_spec:
if self.use_metrics_layer:
from sentry.snuba.metrics.datasource import get_series
from sentry.snuba.metrics.mqb_query_transformer import (
transform_mqb_query_to_metrics_query,
Expand All @@ -1192,16 +1189,13 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
snuba_query = self.get_snql_query()[0].query
try:
with sentry_sdk.start_span(op="metric_layer", description="transform_query"):
if self._on_demand_spec:
metrics_query = self._get_on_demand_metrics_query()
else:
metrics_query = transform_mqb_query_to_metrics_query(
snuba_query, self.is_alerts_query
)
metric_query = transform_mqb_query_to_metrics_query(
snuba_query, self.is_alerts_query
)
with sentry_sdk.start_span(op="metric_layer", description="run_query"):
metrics_data = get_series(
projects=self.params.projects,
metrics_query=metrics_query,
metrics_query=metric_query,
use_case_id=UseCaseID.TRANSACTIONS
if self.is_performance
else UseCaseID.SESSIONS,
Expand Down
3 changes: 1 addition & 2 deletions src/sentry/snuba/metrics/extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,6 @@
"browser.name",
"os.name",
"geo.country_code",
    # event.type is accepted as a valid search field but it will not be converted to a condition for metric extraction
"event.type",
]

Expand Down Expand Up @@ -307,7 +306,7 @@ def __init__(self, field: str, query: str):

# On-demand metrics are implicitly transaction metrics. Remove the
# filter from the query since it can't be translated to a RuleCondition.
self._query = re.sub(r"event\.type:transaction\s*", "", query).strip()
self._query = re.sub(r"event\.type:transaction\s*", "", query)
self._init_aggregate(field)

def _init_aggregate(self, aggregate: str) -> None:
Expand Down
41 changes: 1 addition & 40 deletions tests/sentry/search/events/builder/test_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -1697,7 +1697,7 @@ def test_transaction_in_filter(self):
def test_project_filter(self):
query = TimeseriesMetricQueryBuilder(
self.params,
dataset=Dataset.Metrics,
dataset=Dataset.PerformanceMetrics,
interval=900,
query=f"project:{self.project.slug}",
selected_columns=["p95(transaction.duration)"],
Expand Down Expand Up @@ -1953,45 +1953,6 @@ def test_no_error_if_aggregates_disallowed_but_no_aggregates_included(self):
allow_metric_aggregates=False,
)

def test_on_demand_metrics(self):
query = TimeseriesMetricQueryBuilder(
self.params,
dataset=Dataset.PerformanceMetrics,
interval=900,
query="transaction.duration:>0",
selected_columns=["count()"],
)
result = query.run_query("test_query")
assert result["data"][:5] == [
{
"time": self.start.isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=1)).isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=2)).isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=3)).isoformat(),
"count": 0,
},
{
"time": (self.start + datetime.timedelta(hours=4)).isoformat(),
"count": 0,
},
]
self.assertCountEqual(
result["meta"],
[
{"name": "time", "type": "DateTime('Universal')"},
{"name": "count", "type": "Float64"},
],
)


class HistogramMetricQueryBuilderTest(MetricBuilderBaseTest):
def test_histogram_columns_set_on_builder(self):
Expand Down
1 change: 0 additions & 1 deletion tests/sentry/snuba/test_extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ def test_is_on_demand_query_invalid_query():
assert is_on_demand_query(dataset, "count()", "AND") is False
assert is_on_demand_query(dataset, "count()", "(AND transaction.duration:>=1") is False
assert is_on_demand_query(dataset, "count()", "transaction.duration:>=abc") is False
assert is_on_demand_query(dataset, "count_if(}", "") is False


def test_is_on_demand_query_true():
Expand Down

0 comments on commit 4856ab5

Please sign in to comment.