Skip to content

Commit

Permalink
Revert "feat(errors): process trace_sampled and num_processing_errors…
Browse files Browse the repository at this point in the history
… from event (#4332)"

This reverts commit 2705677.

Co-authored-by: barkbarkimashark <101606877+barkbarkimashark@users.noreply.github.com>
  • Loading branch information
getsentry-bot and barkbarkimashark committed Jun 16, 2023
1 parent 2705677 commit f035b63
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 136 deletions.
7 changes: 0 additions & 7 deletions snuba/datasets/processors/errors_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,10 +143,6 @@ def process_insert(

self.extract_stacktraces(processed, stacks, threads)

processing_errors = data.get("errors", None)
if processing_errors is not None and isinstance(processing_errors, list):
processed["num_processing_errors"] = len(processing_errors)

processed["offset"] = metadata.offset
processed["partition"] = metadata.partition
processed["message_timestamp"] = metadata.timestamp
Expand Down Expand Up @@ -266,7 +262,6 @@ def extract_promoted_contexts(
transaction_ctx = contexts.get("trace") or {}
trace_id = transaction_ctx.get("trace_id", None)
span_id = transaction_ctx.get("span_id", None)
trace_sampled = transaction_ctx.get("sampled", None)

replay_ctx = contexts.get("replay") or {}
replay_id = replay_ctx.get("replay_id", None)
Expand All @@ -282,8 +277,6 @@ def extract_promoted_contexts(
output["trace_id"] = str(uuid.UUID(trace_id))
if span_id:
output["span_id"] = int(span_id, 16)
if trace_sampled:
output["trace_sampled"] = bool(trace_sampled)

def extract_common(
self,
Expand Down
130 changes: 1 addition & 129 deletions tests/datasets/test_errors_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import uuid
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Any, Mapping, Sequence
from typing import Any, Mapping
from uuid import UUID

import pytest
Expand Down Expand Up @@ -33,11 +33,9 @@ class ErrorEvent:
geo: Mapping[str, str]
threads: Mapping[str, Any] | None
trace_id: str
trace_sampled: bool | None
environment: str
replay_id: uuid.UUID | None
received_timestamp: datetime
errors: Sequence[Mapping[str, Any]] | None

def serialize(self) -> tuple[int, str, Mapping[str, Any]]:
serialized_event: dict[str, Any] = {
Expand Down Expand Up @@ -254,12 +252,6 @@ def serialize(self) -> tuple[int, str, Mapping[str, Any]]:
}
if self.threads:
serialized_event["data"]["threads"] = self.threads
if self.trace_sampled:
serialized_event["data"]["contexts"]["trace"][
"sampled"
] = self.trace_sampled
if self.errors:
serialized_event["data"]["errors"] = self.errors

return (
2,
Expand Down Expand Up @@ -387,10 +379,6 @@ def build_result(self, meta: KafkaMessageMetadata) -> Mapping[str, Any]:
expected_result["tags.key"].insert(4, "replayId")
expected_result["tags.value"].insert(4, self.replay_id.hex)

if self.trace_sampled:
expected_result["contexts.key"].insert(5, "trace.sampled")
expected_result["contexts.value"].insert(5, str(self.trace_sampled))

return expected_result


Expand Down Expand Up @@ -422,7 +410,6 @@ def __get_error_event(self, timestamp: datetime, recieved: datetime) -> ErrorEve
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
received_timestamp=recieved,
threads=None,
Expand All @@ -440,7 +427,6 @@ def __get_error_event(self, timestamp: datetime, recieved: datetime) -> ErrorEve
"city": "fake_city",
"subdivision": "fake_subdivision",
},
errors=None,
)

def test_errors_basic(self) -> None:
Expand Down Expand Up @@ -472,7 +458,6 @@ def test_errors_replayid_context(self) -> None:
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
received_timestamp=recieved,
threads=None,
Expand All @@ -490,7 +475,6 @@ def test_errors_replayid_context(self) -> None:
"subdivision": "fake_subdivision",
},
replay_id=uuid.uuid4(),
errors=None,
)

payload = message.serialize()
Expand All @@ -510,7 +494,6 @@ def test_errors_replayid_tag(self) -> None:
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
threads=None,
received_timestamp=recieved,
Expand All @@ -528,7 +511,6 @@ def test_errors_replayid_tag(self) -> None:
"subdivision": "fake_subdivision",
},
replay_id=None,
errors=None,
)
replay_id = uuid.uuid4()
payload = message.serialize()
Expand All @@ -555,7 +537,6 @@ def test_errors_replayid_tag_and_context(self) -> None:
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
received_timestamp=recieved,
release="1.0.0",
Expand All @@ -573,7 +554,6 @@ def test_errors_replayid_tag_and_context(self) -> None:
"subdivision": "fake_subdivision",
},
replay_id=replay_id,
errors=None,
)

payload = message.serialize()
Expand All @@ -597,7 +577,6 @@ def test_errors_replayid_invalid_tag(self) -> None:
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
received_timestamp=recieved,
threads=None,
Expand All @@ -615,7 +594,6 @@ def test_errors_replayid_invalid_tag(self) -> None:
"subdivision": "fake_subdivision",
},
replay_id=None,
errors=None,
)
invalid_replay_id = "imnotavaliduuid"
payload = message.serialize()
Expand All @@ -641,7 +619,6 @@ def test_exception_main_thread_true(self) -> None:
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
received_timestamp=recieved,
release="1.0.0",
Expand Down Expand Up @@ -673,7 +650,6 @@ def test_exception_main_thread_true(self) -> None:
},
]
},
errors=None,
)
payload = message.serialize()
meta = KafkaMessageMetadata(offset=2, partition=2, timestamp=timestamp)
Expand All @@ -695,7 +671,6 @@ def test_exception_main_thread_false(self) -> None:
platform="python",
message="",
trace_id=str(uuid.uuid4()),
trace_sampled=False,
timestamp=timestamp,
received_timestamp=recieved,
release="1.0.0",
Expand Down Expand Up @@ -727,7 +702,6 @@ def test_exception_main_thread_false(self) -> None:
},
]
},
errors=None,
)
payload = message.serialize()
meta = KafkaMessageMetadata(offset=2, partition=2, timestamp=timestamp)
Expand All @@ -738,105 +712,3 @@ def test_exception_main_thread_false(self) -> None:
assert self.processor.process_message(payload, meta) == InsertBatch(
[result], None
)

def test_trace_sampled(self) -> None:
    """Verify that a truthy ``contexts.trace.sampled`` value in the event payload
    is promoted to the ``trace_sampled`` column of the processed insert row,
    and that a ``None`` value leaves the column unset (pre-existing behavior).

    NOTE(review): this test was removed by the revert this diff belongs to; it
    exercises the `trace_sampled` extraction that the revert deletes from
    `errors_processor.py`.
    """
    # self.__get_timestamps is a name-mangled private helper on the enclosing
    # test class (not visible in this diff view) — presumably returns a
    # (timestamp, received) pair of datetimes; confirm against the full file.
    timestamp, recieved = self.__get_timestamps()
    message = ErrorEvent(
        event_id=str(uuid.UUID("dcb9d002cac548c795d1c9adbfc68040")),
        organization_id=1,
        project_id=2,
        group_id=100,
        platform="python",
        message="",
        trace_id=str(uuid.uuid4()),
        trace_sampled=True,  # the field under test: serialized into contexts.trace.sampled
        timestamp=timestamp,
        received_timestamp=recieved,
        release="1.0.0",
        dist="dist",
        environment="prod",
        email="foo@bar.com",
        ip_address="127.0.0.1",
        user_id="myself",
        username="me",
        geo={
            "country_code": "XY",
            "region": "fake_region",
            "city": "fake_city",
            "subdivision": "fake_subdivision",
        },
        replay_id=None,
        threads=None,
        errors=None,
    )
    payload = message.serialize()
    meta = KafkaMessageMetadata(offset=2, partition=2, timestamp=timestamp)

    result = message.build_result(meta)
    # build_result does not set the column itself; the processor is expected
    # to add it, so the expectation is patched in by hand here.
    result["trace_sampled"] = True

    assert self.processor.process_message(payload, meta) == InsertBatch(
        [result], None
    )

    # verify processing trace.sampled=None works as it did before:
    # when the context value is None it is not serialized, so the processor
    # must not emit a trace_sampled column at all.
    message.trace_sampled = None
    payload = message.serialize()
    meta = KafkaMessageMetadata(offset=2, partition=2, timestamp=timestamp)

    result2 = message.build_result(meta)

    assert self.processor.process_message(payload, meta) == InsertBatch(
        [result2], None
    )

def test_errors_processed(self) -> None:
    """Verify that a list under ``data.errors`` in the event payload is counted
    into the ``num_processing_errors`` column of the processed insert row, and
    that ``errors=None`` leaves the column unset (pre-existing behavior).

    NOTE(review): this test was removed by the revert this diff belongs to; it
    exercises the `num_processing_errors` extraction that the revert deletes
    from `errors_processor.py`.
    """
    # Name-mangled private helper on the enclosing test class (not visible in
    # this diff view) — presumably returns (timestamp, received) datetimes.
    timestamp, recieved = self.__get_timestamps()
    message = ErrorEvent(
        event_id=str(uuid.UUID("dcb9d002cac548c795d1c9adbfc68040")),
        organization_id=1,
        project_id=2,
        group_id=100,
        platform="python",
        message="",
        trace_id=str(uuid.uuid4()),
        trace_sampled=False,
        timestamp=timestamp,
        received_timestamp=recieved,
        release="1.0.0",
        dist="dist",
        environment="prod",
        email="foo@bar.com",
        ip_address="127.0.0.1",
        user_id="myself",
        username="me",
        geo={
            "country_code": "XY",
            "region": "fake_region",
            "city": "fake_city",
            "subdivision": "fake_subdivision",
        },
        replay_id=None,
        threads=None,
        # three entries → the processor should report num_processing_errors == 3
        errors=[{"type": "one"}, {"type": "two"}, {"type": "three"}],
    )
    payload = message.serialize()
    meta = KafkaMessageMetadata(offset=2, partition=2, timestamp=timestamp)

    result = message.build_result(meta)
    # build_result does not compute the count itself; the expected value is
    # patched in by hand to match the three errors above.
    result["num_processing_errors"] = 3

    assert self.processor.process_message(payload, meta) == InsertBatch(
        [result], None
    )

    # ensure old behavior where data.errors=None won't set 'num_processing_errors'
    message.errors = None
    payload = message.serialize()
    meta = KafkaMessageMetadata(offset=2, partition=2, timestamp=timestamp)

    result = message.build_result(meta)

    assert self.processor.process_message(payload, meta) == InsertBatch(
        [result], None
    )

0 comments on commit f035b63

Please sign in to comment.