Merge branch 'master' into stabilizes-sentry-app-install-test
GabeVillalobos committed Jul 17, 2023
2 parents 1f8a644 + f243492 commit 4333316
Showing 52 changed files with 522 additions and 107 deletions.
68 changes: 52 additions & 16 deletions bin/mock-replay
@@ -1,18 +1,29 @@
 #!/usr/bin/env python
+from io import BytesIO
+from zlib import compress
+
 from sentry.runner import configure
+from sentry.utils.json import dumps_htmlsafe
 
 configure()
-import datetime
 import pathlib
 import uuid
+from datetime import datetime, timedelta
 
 import click
 import requests
 from django.conf import settings
 
-from sentry.models import File, Organization, Project
+from sentry.models import File, Organization, Project, Team
 from sentry.replays.models import ReplayRecordingSegment
-from sentry.replays.testutils import mock_replay
+from sentry.replays.testutils import (
+    mock_replay,
+    mock_rrweb_div_helloworld,
+    mock_segment_console,
+    mock_segment_fullsnapshot,
+    mock_segment_init,
+    mock_segment_nagivation,
+)
 
 
 def store_replay(replay):
@@ -22,16 +33,28 @@ def store_replay(replay):
     assert response.status_code == 200
 
 
-def create_recording_segment(replay_id, project_id, filename, segment_id):
-    with open(filename, "rb") as f:
-        file = File.objects.create(name=filename, type="application/octet-stream")
-        file.putfile(f)
-
+def create_recording(replay_id, project_id, timestamp):
+    segments = [
+        mock_segment_init(timestamp),
+        mock_segment_fullsnapshot(timestamp, [mock_rrweb_div_helloworld()]),
+        mock_segment_console(timestamp),
+        mock_segment_nagivation(timestamp + timedelta(seconds=1), hrefFrom="/", hrefTo="/home/"),
+        mock_segment_nagivation(
+            timestamp + timedelta(seconds=2), hrefFrom="/home/", hrefTo="/profile/"
+        ),
+    ]
+    for (segment_id, segment) in enumerate(segments):
+        store_replay_segments(replay_id, project_id, segment_id, segment)
+
+
+def store_replay_segments(replay_id: str, project_id: str, segment_id: int, segment):
+    f = File.objects.create(name="rr:{segment_id}", type="replay.recording")
+    f.putfile(BytesIO(compress(dumps_htmlsafe(segment).encode())))
     ReplayRecordingSegment.objects.create(
-        replay_id=replay_id.replace("-", ""),
+        replay_id=replay_id,
         project_id=project_id,
         segment_id=segment_id,
-        file_id=file.id,
+        file_id=f.id,
     )
 
 


@@ -41,7 +64,13 @@ def make_filename(filename: str) -> str:
 
 
 def main():
-    project_name = "Replay Test"
+    project_name = "Replay Test Project"
+
+    if not settings.SENTRY_FEATURES["organizations:session-replay"]:
+        click.echo(
+            'Session Replays is currently turned off! \nTo enable, add the following line to your local sentry.conf.py file: \nSENTRY_FEATURES["organizations:session-replay"] = True'
+        )
+        exit()
 
     if settings.SENTRY_SINGLE_ORGANIZATION:
         org = Organization.get_default()
@@ -51,25 +80,32 @@ def main():
         org, _ = Organization.objects.get_or_create(slug="default")
 
     click.echo(f" > Mocking project {project_name}")
+
+    team, _ = Team.objects.get_or_create(
+        organization=org, slug="sentry", defaults={"name": "Sentry"}
+    )
+
     project, _ = Project.objects.get_or_create(
         name=project_name,
         defaults={
            "organization": org,
            "flags": Project.flags.has_replays,
        },
+        platform="javascript",
     )
 
+    project.add_team(team)
+
     replay_id = uuid.uuid4().hex
-    seq1_timestamp = datetime.datetime.now() - datetime.timedelta(seconds=22)
-    seq2_timestamp = datetime.datetime.now() - datetime.timedelta(seconds=5)
+    seq1_timestamp = datetime.now() - timedelta(seconds=22)
+    seq2_timestamp = datetime.now() - timedelta(seconds=5)
 
-    click.echo("Creating Clickhouse entries...")
+    click.echo("Creating Replay events entries...")
     store_replay(mock_replay(seq1_timestamp, project.id, replay_id, segment_id=0))
     store_replay(mock_replay(seq2_timestamp, project.id, replay_id, segment_id=1))
 
-    click.echo("Creating Postgres entries...")
-    create_recording_segment(replay_id, project.id, make_filename("rrweb-1658770770892.json"), 0)
-    create_recording_segment(replay_id, project.id, make_filename("rrweb-1658770772903.json"), 1)
+    click.echo("Creating Replay recording entries...")
+    create_recording(replay_id, project.id, seq1_timestamp)
 
 
 if __name__ == "__main__":
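
Note: the rewritten script builds segment payloads in memory instead of reading rrweb JSON fixtures from disk. A minimal stdlib-only sketch of that packing, using json.dumps in place of Sentry's dumps_htmlsafe and an invented one-event payload:

# JSON-encode the rrweb events, zlib-compress, and wrap in BytesIO,
# mirroring store_replay_segments() above (illustrative payload only).
import json
from io import BytesIO
from zlib import compress, decompress

segment = [{"type": 4, "data": {"href": "/home/"}, "timestamp": 1658770770892}]
packed = BytesIO(compress(json.dumps(segment).encode()))

# Round trip: the stored blob decompresses back to the original JSON.
assert json.loads(decompress(packed.getvalue())) == segment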
2 changes: 1 addition & 1 deletion migrations_lockfile.txt
@@ -6,5 +6,5 @@ To resolve this, rebase against latest master and regenerate your migration. This
 will then be regenerated, and you should be able to merge without conflicts.
 
 nodestore: 0002_nodestore_no_dictfield
-sentry: 0513_django_jsonfield
+sentry: 0514_migrate_priority_saved_searches
 social_auth: 0001_initial
2 changes: 0 additions & 2 deletions pyproject.toml
@@ -442,7 +442,6 @@ module = [
     "sentry.discover.endpoints.discover_key_transactions",
     "sentry.discover.endpoints.serializers",
     "sentry.discover.tasks",
-    "sentry.event_manager",
     "sentry.eventstore.base",
     "sentry.eventstore.compressor",
     "sentry.eventstore.models",
@@ -1156,7 +1155,6 @@ module = [
     "tests.sentry.eventstore.test_models",
     "tests.sentry.eventstream.kafka.test_protocol",
     "tests.sentry.eventstream.test_eventstream",
-    "tests.sentry.eventtypes.test_default",
     "tests.sentry.eventtypes.test_error",
     "tests.sentry.features.test_manager",
     "tests.sentry.grouping.test_enhancer",
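
The two deletions above shrink this module list in pyproject.toml, which — assuming it is mypy's per-module override list — means sentry.event_manager and tests.sentry.eventtypes.test_default are now checked more strictly; the typing edits in src/sentry/event_manager.py below are what allow that. A hypothetical illustration of one such fix (names assumed):

# Tag values must be strings for a Dict[str, str] annotation to hold, so the
# bool from the membership test gets wrapped in str() (see event_manager.py below).
from typing import Dict

job_data = {"_relay_processed": True}  # assumed shape
metric_tags: Dict[str, str] = {"from_relay": str("_relay_processed" in job_data)}
print(metric_tags)  # {'from_relay': 'True'}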
8 changes: 8 additions & 0 deletions src/sentry/api/endpoints/group_hashes.py
@@ -10,6 +10,7 @@
 from sentry.api.serializers import EventSerializer, serialize
 from sentry.models import GroupHash
 from sentry.tasks.unmerge import unmerge
+from sentry.utils import metrics
 from sentry.utils.snuba import raw_query
 
 
@@ -61,6 +62,13 @@ def delete(self, request: Request, group) -> Response:
         if not hash_list:
             return Response()
 
+        metrics.incr(
+            "grouping.unmerge_issues",
+            sample_rate=1.0,
+            # We assume that if someone's merged groups, they were all from the same platform
+            tags={"platform": group.platform or "unknown"},
+        )
+
         unmerge.delay(
             group.project_id, group.id, None, hash_list, request.user.id if request.user else None
         )
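
For context, the new counter records every unmerge (sample_rate=1.0) tagged by platform. A toy statsd-style counter — not Sentry's metrics backend — showing what the sample rate means:

# With sample_rate=1.0 every call is recorded; lower rates record a random
# fraction, which backends scale back up by 1/sample_rate when aggregating.
import random
from typing import Optional

def incr(key: str, sample_rate: float = 1.0, tags: Optional[dict] = None) -> None:
    if random.random() < sample_rate:
        print(f"{key} +1 {tags or {}}")

incr("grouping.unmerge_issues", sample_rate=1.0, tags={"platform": "python"})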
13 changes: 13 additions & 0 deletions src/sentry/api/endpoints/organization_events.py
@@ -57,6 +57,19 @@
     Referrer.API_TRACE_VIEW_ERRORS_VIEW.value,
     Referrer.API_TRACE_VIEW_HOVER_CARD.value,
     Referrer.API_ISSUES_ISSUE_EVENTS.value,
+    Referrer.API_STARFISH_ENDPOINT_LIST.value,
+    Referrer.API_STARFISH_GET_SPAN_ACTIONS.value,
+    Referrer.API_STARFISH_GET_SPAN_DOMAINS.value,
+    Referrer.API_STARFISH_GET_SPAN_OPERATIONS.value,
+    Referrer.API_STARFISH_SIDEBAR_SPAN_METRICS.value,
+    Referrer.API_STARFISH_SPAN_CATEGORY_BREAKDOWN.value,
+    Referrer.API_STARFISH_SPAN_LIST.value,
+    Referrer.API_STARFISH_SPAN_SUMMARY_P95.value,
+    Referrer.API_STARFISH_SPAN_SUMMARY_PAGE.value,
+    Referrer.API_STARFISH_SPAN_SUMMARY_PANEL.value,
+    Referrer.API_STARFISH_SPAN_SUMMARY_TRANSACTIONS.value,
+    Referrer.API_STARFISH_SPAN_TRANSACTION_METRICS.value,
+    Referrer.API_STARFISH_TOTAL_TIME.value,
 }
 
 ALLOWED_EVENTS_GEO_REFERRERS = {
6 changes: 6 additions & 0 deletions src/sentry/api/endpoints/organization_events_stats.py
@@ -54,6 +54,12 @@
     Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_FROZEN_FRAMES_AREA.value,
     Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_SLOW_FRAMES.value,
     Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_FROZEN_FRAMES.value,
+    Referrer.API_STARFISH_SPAN_CATEGORY_BREAKDOWN_CHART.value,
+    Referrer.API_STARFISH_ENDPOINT_OVERVIEW.value,
+    Referrer.API_STARFISH_HTTP_ERROR_COUNT.value,
+    Referrer.API_STARFISH_SPAN_SUMMARY_PAGE_CHART.value,
+    Referrer.API_STARFISH_SIDEBAR_SPAN_METRICS_CHART.value,
+    Referrer.API_STARFISH_SPAN_TIME_CHARTS.value,
 }
 
 
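
Both files extend referrer allowlists with the new Starfish referrers. A sketch of the typical membership check — the function name, set values, and fallback are assumptions, not taken from this diff:

# Unknown referrers fall back to a catch-all default instead of being
# forwarded to the query layer verbatim.
ALLOWED_EVENTS_REFERRERS = {"api.starfish.span-list", "api.starfish.total-time"}  # assumed values
DEFAULT_REFERRER = "api.organization-events"  # assumed fallback

def resolve_referrer(requested: str) -> str:
    return requested if requested in ALLOWED_EVENTS_REFERRERS else DEFAULT_REFERRER

print(resolve_referrer("api.starfish.span-list"))  # passes through
print(resolve_referrer("not-on-the-list"))         # falls back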
23 changes: 23 additions & 0 deletions src/sentry/api/helpers/group_index/update.py
@@ -1,8 +1,10 @@
 from __future__ import annotations
 
+import re
 from collections import defaultdict
 from datetime import datetime, timedelta
 from typing import Any, Dict, Mapping, MutableMapping, Sequence
+from urllib.parse import urlparse
 
 import rest_framework
 from django.db import IntegrityError, transaction
@@ -650,6 +652,27 @@ def update_groups(
         if len(projects) > 1:
             return Response({"detail": "Merging across multiple projects is not supported"})
 
+        referer = urlparse(request.META.get("HTTP_REFERER", "")).path
+        issue_stream_regex = r"^(\/organizations\/[^\/]+)?\/issues\/$"
+        similar_issues_tab_regex = r"^(\/organizations\/[^\/]+)?\/issues\/\d+\/similar\/$"
+
+        metrics.incr(
+            "grouping.merge_issues",
+            sample_rate=1.0,
+            tags={
+                # We assume that if someone's merging groups, they're from the same platform
+                "platform": group_list[0].platform or "unknown",
+                # TODO: It's probably cleaner to just send this value from the front end
+                "referer": (
+                    "issue stream"
+                    if re.search(issue_stream_regex, referer)
+                    else "similar issues tab"
+                    if re.search(similar_issues_tab_regex, referer)
+                    else "unknown"
+                ),
+            },
+        )
+
         result["merge"] = handle_merge(group_list, project_lookup, acting_user)
 
         inbox = result.get("inbox", None)
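
A quick self-contained check of the two referer regexes added above, run against assumed example URLs; urlparse strips the scheme and host, so only the path is matched:

import re
from urllib.parse import urlparse

issue_stream_regex = r"^(\/organizations\/[^\/]+)?\/issues\/$"
similar_issues_tab_regex = r"^(\/organizations\/[^\/]+)?\/issues\/\d+\/similar\/$"

for url in (
    "https://acme.sentry.io/issues/",
    "https://sentry.io/organizations/acme/issues/42/similar/",
    "https://sentry.io/settings/",
):
    path = urlparse(url).path
    referer = (
        "issue stream"
        if re.search(issue_stream_regex, path)
        else "similar issues tab"
        if re.search(similar_issues_tab_regex, path)
        else "unknown"
    )
    print(path, "->", referer)  # issue stream / similar issues tab / unknown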
2 changes: 1 addition & 1 deletion src/sentry/attachments/__init__.py
@@ -1,4 +1,4 @@
-__all__ = ["attachment_cache", "CachedAttachment"]
+__all__ = ["attachment_cache", "CachedAttachment", "MissingAttachmentChunks"]
 
 from django.conf import settings
 
41 changes: 12 additions & 29 deletions src/sentry/event_manager.py
@@ -19,7 +19,6 @@
     Optional,
     Sequence,
     Tuple,
-    Type,
     TypedDict,
     Union,
     cast,
@@ -55,15 +54,8 @@
 from sentry.culprit import generate_culprit
 from sentry.dynamic_sampling import LatestReleaseBias, LatestReleaseParams
 from sentry.eventstore.processing import event_processing_store
-from sentry.eventtypes import (
-    CspEvent,
-    DefaultEvent,
-    ErrorEvent,
-    ExpectCTEvent,
-    ExpectStapleEvent,
-    HpkpEvent,
-    TransactionEvent,
-)
+from sentry.eventtypes import EventType
+from sentry.eventtypes.transaction import TransactionEvent
 from sentry.grouping.api import (
     BackgroundGroupingConfigLoader,
     GroupingConfig,
@@ -660,7 +652,7 @@ def save_error_events(
     with metrics.timer("event_manager.save_attachments"):
         save_attachments(cache_key, attachments, job)
 
-    metric_tags = {"from_relay": "_relay_processed" in job["data"]}
+    metric_tags = {"from_relay": str("_relay_processed" in job["data"])}
 
     metrics.timing(
         "events.latency",
@@ -1260,13 +1252,15 @@ def _tsdb_record_all_metrics(jobs: Sequence[Job]) -> None:
             records.append((TSDBModel.users_affected_by_project, project_id, (user.tag_value,)))
 
         if incrs:
-            tsdb.incr_multi(incrs, timestamp=event.datetime, environment_id=environment.id)
+            tsdb.backend.incr_multi(incrs, timestamp=event.datetime, environment_id=environment.id)
 
         if records:
-            tsdb.record_multi(records, timestamp=event.datetime, environment_id=environment.id)
+            tsdb.backend.record_multi(
+                records, timestamp=event.datetime, environment_id=environment.id
+            )
 
         if frequencies:
-            tsdb.record_frequency_multi(frequencies, timestamp=event.datetime)
+            tsdb.backend.record_frequency_multi(frequencies, timestamp=event.datetime)
 
 
 @metrics.wraps("save_event.nodestore_save_many")
@@ -1438,17 +1432,6 @@ def _get_event_user_impl(
     return euser
 
 
-EventType = Union[
-    DefaultEvent,
-    ErrorEvent,
-    CspEvent,
-    HpkpEvent,
-    ExpectCTEvent,
-    ExpectStapleEvent,
-    TransactionEvent,
-]
-
-
 def get_event_type(data: Mapping[str, Any]) -> EventType:
     return eventtypes.get(data.get("type", "default"))()
 
@@ -1922,7 +1905,7 @@ def _process_existing_aggregate(
     return bool(is_regression)
 
 
-Attachment = Type[CachedAttachment]
+Attachment = CachedAttachment
 
 
 def discard_event(job: Job, attachments: Sequence[Attachment]) -> None:
@@ -1938,7 +1921,7 @@ def discard_event(job: Job, attachments: Sequence[Attachment]) -> None:
 
     project = job["event"].project
 
-    quotas.refund(
+    quotas.backend.refund(
         project,
         key=job["project_key"],
         timestamp=job["start_time"],
@@ -1975,7 +1958,7 @@ def discard_event(job: Job, attachments: Sequence[Attachment]) -> None:
     )
 
     if attachment_quantity:
-        quotas.refund(
+        quotas.backend.refund(
             project,
             key=job["project_key"],
             timestamp=job["start_time"],
@@ -2099,7 +2082,7 @@ def filter_attachments_for_group(attachments: list[Attachment], job: Job) -> lis
         cache.set(crashreports_key, max_crashreports, CRASH_REPORT_TIMEOUT)
 
         if refund_quantity:
-            quotas.refund(
+            quotas.backend.refund(
                 project,
                 key=job["project_key"],
                 timestamp=job["start_time"],
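
The tsdb.* -> tsdb.backend.* and quotas.refund -> quotas.backend.refund edits all route call sites through an explicit backend attribute. A minimal sketch of that indirection with dummy classes — not Sentry's actual service wrappers:

# Module-level service objects expose a swappable .backend; callers go
# through it explicitly instead of through module-level proxy functions.
class DummyTSDBBackend:
    def incr_multi(self, items, timestamp=None, environment_id=None):
        for model, key in items:
            print(f"incr {model}:{key} at {timestamp}")

class TSDBService:
    def __init__(self, backend):
        self.backend = backend  # swapped per deployment/configuration

tsdb = TSDBService(DummyTSDBBackend())
tsdb.backend.incr_multi([("events", 1), ("users_affected", 1)], timestamp=0)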
12 changes: 12 additions & 0 deletions src/sentry/eventtypes/__init__.py
@@ -1,3 +1,5 @@
+from typing import Union
+
 from .base import DefaultEvent
 from .error import ErrorEvent
 from .generic import GenericEvent
@@ -17,3 +19,13 @@
 
 get = default_manager.get
 register = default_manager.register
+
+EventType = Union[
+    DefaultEvent,
+    ErrorEvent,
+    CspEvent,
+    HpkpEvent,
+    ExpectCTEvent,
+    ExpectStapleEvent,
+    TransactionEvent,
+]
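
With the union now exported from sentry.eventtypes, callers import one shared alias instead of redefining it. A hedged usage sketch mirroring get_event_type() in event_manager.py above (requires a configured Sentry checkout to run):

from sentry import eventtypes
from sentry.eventtypes import EventType

def get_event_type(data) -> EventType:
    # Look up the event class registered for the payload's "type" key,
    # defaulting to "default", and instantiate it.
    return eventtypes.get(data.get("type", "default"))()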