Skip to content

Commit

Permalink
chore(devexp): cleanup dead code (again) (#5836)
Browse files Browse the repository at this point in the history
When I merged #5793, it conflicted with the newly added tests and caused mypy issues. We should be able to merge this now.



* Revert "Revert "feat(devexp): remove dead code (#5793)""

This reverts commit 884c757.

* fix mypy
  • Loading branch information
volokluev committed Apr 30, 2024
1 parent 4f20c55 commit cd42255
Show file tree
Hide file tree
Showing 35 changed files with 53 additions and 630 deletions.
2 changes: 1 addition & 1 deletion snuba/admin/production_queries/prod_queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def _validate_projects_in_query(body: Dict[str, Any], dataset: Dataset) -> None:
return

request_parts = RequestSchema.build(HTTPQuerySettings).validate(body)
query = parse_snql_query(request_parts.query["query"], dataset)[0]
query = parse_snql_query(request_parts.query["query"], dataset)
project_ids = get_object_ids_in_query_ast(query, "project_id")
if project_ids == set():
raise InvalidQueryException("Missing project ID")
Expand Down
9 changes: 2 additions & 7 deletions snuba/query/mql/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
FilterInSelectOptimizer,
)
from snuba.query.query_settings import HTTPQuerySettings, QuerySettings
from snuba.query.snql.anonymize import format_snql_anonymized
from snuba.query.snql.parser import (
MAX_LIMIT,
POST_PROCESSORS,
Expand Down Expand Up @@ -1076,7 +1075,7 @@ def parse_mql_query(
dataset: Dataset,
custom_processing: Optional[CustomProcessors] = None,
settings: QuerySettings | None = None,
) -> Tuple[Union[CompositeQuery[QueryEntity], LogicalQuery], str]:
) -> Union[CompositeQuery[QueryEntity], LogicalQuery]:
with sentry_sdk.start_span(op="parser", description="parse_mql_query_initial"):
query = parse_mql_query_body(body, dataset)
with sentry_sdk.start_span(
Expand All @@ -1095,10 +1094,6 @@ def parse_mql_query(
with sentry_sdk.start_span(op="processor", description="treeify_conditions"):
_post_process(query, [_treeify_or_and_conditions], settings)

# TODO: Figure out what to put for the anonymized string
with sentry_sdk.start_span(op="parser", description="anonymize_snql_query"):
snql_anonymized = format_snql_anonymized(query).get_sql()

with sentry_sdk.start_span(op="processor", description="post_processors"):
_post_process(
query,
Expand Down Expand Up @@ -1126,4 +1121,4 @@ def parse_mql_query(
with sentry_sdk.start_span(op="validate", description="expression_validators"):
_post_process(query, VALIDATORS)

return query, snql_anonymized
return query
2 changes: 0 additions & 2 deletions snuba/query/parser/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,7 @@ class PostProcessingError(Exception):
def __init__(
self,
query: Query | CompositeQuery[Entity],
snql_anonymized: str,
message: str | None = None,
):
super().__init__(message)
self.query = query
self.snql_anonymized = snql_anonymized
124 changes: 0 additions & 124 deletions snuba/query/snql/anonymize.py

This file was deleted.

11 changes: 3 additions & 8 deletions snuba/query/snql/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,6 @@
from snuba.query.parser.exceptions import ParsingException, PostProcessingError
from snuba.query.query_settings import QuerySettings
from snuba.query.schema import POSITIVE_OPERATORS
from snuba.query.snql.anonymize import format_snql_anonymized
from snuba.query.snql.discover_entity_selection import select_discover_entity
from snuba.query.snql.expression_visitor import (
HighPriArithmetic,
Expand Down Expand Up @@ -1503,10 +1502,9 @@ def parse_snql_query(
dataset: Dataset,
custom_processing: Optional[CustomProcessors] = None,
settings: QuerySettings | None = None,
) -> Tuple[Union[CompositeQuery[QueryEntity], LogicalQuery], str]:
) -> Union[CompositeQuery[QueryEntity], LogicalQuery]:
with sentry_sdk.start_span(op="parser", description="parse_snql_query_initial"):
query = parse_snql_query_initial(body)
snql_anonymized = ""

if settings and settings.get_dry_run():
explain_meta.set_original_ast(str(query))
Expand All @@ -1518,9 +1516,6 @@ def parse_snql_query(
with sentry_sdk.start_span(op="processor", description="treeify_conditions"):
_post_process(query, [_treeify_or_and_conditions], settings)

with sentry_sdk.start_span(op="parser", description="anonymize_snql_query"):
snql_anonymized = format_snql_anonymized(query).get_sql()

with sentry_sdk.start_span(op="processor", description="post_processors"):
_post_process(
query,
Expand All @@ -1544,8 +1539,8 @@ def parse_snql_query(
# Validating
with sentry_sdk.start_span(op="validate", description="expression_validators"):
_post_process(query, VALIDATORS)
return query, snql_anonymized
return query
except InvalidQueryException:
raise
except Exception:
raise PostProcessingError(query, snql_anonymized)
raise PostProcessingError(query)
2 changes: 1 addition & 1 deletion snuba/querylog/query_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,7 +278,7 @@ def __init__(
self.entity = entity_name
self.query_list: MutableSequence[ClickhouseQueryMetadata] = []
self.projects = ProjectsFinder().visit(request.query)
self.snql_anonymized = request.snql_anonymized
self.snql_anonymized = ""
else:
self.start_timestamp = start_timestamp
self.end_timestamp = end_timestamp
Expand Down
3 changes: 0 additions & 3 deletions snuba/request/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@ class Request:
query_settings: QuerySettings
attribution_info: AttributionInfo

# TODO: This should maybe not live on the request
snql_anonymized: str

@property
def referrer(self) -> str:
return self.attribution_info.referrer
21 changes: 7 additions & 14 deletions snuba/request/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import random
import textwrap
import uuid
from typing import Any, Dict, MutableMapping, Optional, Protocol, Tuple, Type, Union
from typing import Any, Dict, MutableMapping, Optional, Protocol, Type, Union

import sentry_sdk

Expand Down Expand Up @@ -44,7 +44,7 @@ def __call__(
settings: QuerySettings,
dataset: Dataset,
custom_processing: Optional[CustomProcessors] = ...,
) -> Tuple[Union[Query, CompositeQuery[Entity]], str]:
) -> Union[Query, CompositeQuery[Entity]]:
...


Expand All @@ -53,7 +53,7 @@ def parse_snql_query(
settings: QuerySettings,
dataset: Dataset,
custom_processing: Optional[CustomProcessors] = None,
) -> Tuple[Union[Query, CompositeQuery[Entity]], str]:
) -> Union[Query, CompositeQuery[Entity]]:
return _parse_snql_query(
request_parts.query["query"], dataset, custom_processing, settings
)
Expand All @@ -64,7 +64,7 @@ def parse_mql_query(
settings: QuerySettings,
dataset: Dataset,
custom_processing: Optional[CustomProcessors] = None,
) -> Tuple[Union[Query, CompositeQuery[Entity]], str]:
) -> Union[Query, CompositeQuery[Entity]]:
return _parse_mql_query(
request_parts.query["query"],
request_parts.query["mql_context"],
Expand Down Expand Up @@ -121,24 +121,19 @@ def build_request(
referrer = _get_referrer(request_parts, referrer)
settings_obj = _get_settings_object(settings_class, request_parts, referrer)
try:
query, snql_anonymized = parser(
request_parts, settings_obj, dataset, custom_processing
)
query = parser(request_parts, settings_obj, dataset, custom_processing)
except PostProcessingError as exception:
query = exception.query
snql_anonymized = exception.snql_anonymized
request = _build_request(
body, request_parts, referrer, settings_obj, query, snql_anonymized
body, request_parts, referrer, settings_obj, query
)
query_metadata = SnubaQueryMetadata(
request, get_dataset_name(dataset), timer
)
state.record_query(query_metadata.to_dict())
raise

request = _build_request(
body, request_parts, referrer, settings_obj, query, snql_anonymized
)
request = _build_request(body, request_parts, referrer, settings_obj, query)
except (InvalidJsonRequestException, InvalidQueryException) as exception:
request_status = get_request_status(exception)
record_invalid_request(
Expand Down Expand Up @@ -239,7 +234,6 @@ def _build_request(
referrer: str,
settings: QuerySettings,
query: Query | CompositeQuery[Entity],
snql_anonymized: str,
) -> Request:
org_ids = get_object_ids_in_query_ast(query, "org_id")
if org_ids is not None and len(org_ids) == 1:
Expand All @@ -257,5 +251,4 @@ def _build_request(
query=query,
attribution_info=attribution_info,
query_settings=settings,
snql_anonymized=snql_anonymized,
)
4 changes: 2 additions & 2 deletions tests/clickhouse/query_dsl/test_project_id.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,14 +181,14 @@ def test_find_projects(
with pytest.raises(ValidationException):
request = json_to_snql(query_body, "events")
request.validate()
query, _ = parse_snql_query(str(request.query), events)
query = parse_snql_query(str(request.query), events)
assert isinstance(query, Query)
run_entity_validators(query, HTTPQuerySettings())
identity_translate(query)
else:
request = json_to_snql(query_body, "events")
request.validate()
query, _ = parse_snql_query(str(request.query), events)
query = parse_snql_query(str(request.query), events)
assert isinstance(query, Query)
run_entity_validators(query, HTTPQuerySettings())
translated_query = identity_translate(query)
Expand Down
2 changes: 1 addition & 1 deletion tests/clickhouse/query_dsl/test_time_range.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def test_get_time_range() -> None:

events = get_dataset("events")
entity = get_entity(EntityKey.EVENTS)
query, _ = parse_snql_query(body, events)
query = parse_snql_query(body, events)
assert isinstance(query, Query)
processors = entity.get_query_processors()
for processor in processors:
Expand Down
4 changes: 2 additions & 2 deletions tests/datasets/entities/storage_selectors/test_errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def test_query_storage_selector(
use_readable: bool,
expected_storage: Storage,
) -> None:
query, _ = parse_snql_query(str(snql_query), dataset)
query = parse_snql_query(str(snql_query), dataset)
assert isinstance(query, Query)

if use_readable:
Expand All @@ -127,7 +127,7 @@ def test_query_storage_selector(


def test_assert_raises() -> None:
query, _ = parse_snql_query(
query = parse_snql_query(
"""
MATCH (events)
SELECT event_id
Expand Down
4 changes: 2 additions & 2 deletions tests/datasets/entities/storage_selectors/test_selector.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def test_default_query_storage_selector(
selector: QueryStorageSelector,
expected_storage: Storage,
) -> None:
query, _ = parse_snql_query(str(snql_query), dataset)
query = parse_snql_query(str(snql_query), dataset)
assert isinstance(query, Query)

selected_storage = selector.select_storage(
Expand All @@ -76,7 +76,7 @@ def test_default_query_storage_selector(


def test_assert_raises() -> None:
query, _ = parse_snql_query(
query = parse_snql_query(
""" MATCH (generic_metrics_sets)
SELECT uniq(value) AS unique_values BY project_id, org_id
WHERE org_id = 1
Expand Down
4 changes: 2 additions & 2 deletions tests/datasets/entities/storage_selectors/test_sessions.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def test_query_storage_selector(
selector: QueryStorageSelector,
expected_storage: Storage,
) -> None:
query, _ = parse_snql_query(str(snql_query), dataset)
query = parse_snql_query(str(snql_query), dataset)
assert isinstance(query, Query)
selected_storage = selector.select_storage(
query, SubscriptionQuerySettings(), storage_connections
Expand All @@ -79,7 +79,7 @@ def test_query_storage_selector(


def test_assert_raises() -> None:
query, _ = parse_snql_query(
query = parse_snql_query(
"""
MATCH (events)
SELECT event_id
Expand Down
Loading

0 comments on commit cd42255

Please sign in to comment.