diff --git a/package.json b/package.json index 5cc54373eb0e2f..703e7e8213099e 100644 --- a/package.json +++ b/package.json @@ -165,7 +165,7 @@ "style-loader": "^3.3.1", "ts-node": "^10.9.1", "tslib": "^2.6.2", - "typescript": "^5.2.2", + "typescript": "^5.3.2", "u2f-api": "1.0.10", "url-loader": "^4.1.0", "webpack": "5.87.0", diff --git a/pyproject.toml b/pyproject.toml index 860674a5d4bd9a..0a6e4e0f1e8cd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -258,6 +258,7 @@ module = [ "sentry.api.endpoints.user_identity_config", "sentry.api.endpoints.user_index", "sentry.api.endpoints.user_notification_details", + "sentry.api.endpoints.user_notification_fine_tuning", "sentry.api.endpoints.user_permission_details", "sentry.api.endpoints.user_permissions", "sentry.api.endpoints.user_permissions_config", diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index c8df8bdbd4f235..45346e0adec5f2 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -186,7 +186,7 @@ sortedcontainers==2.4.0 soupsieve==2.3.2.post1 sqlparse==0.4.4 statsd==3.3 -stripe==2.61.0 +stripe==3.1.0 structlog==22.1.0 symbolic==12.7.0 time-machine==2.13.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 493123aa51953e..31a8fde12d69c6 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -125,7 +125,7 @@ snuba-sdk==2.0.9 soupsieve==2.3.2.post1 sqlparse==0.4.4 statsd==3.3 -stripe==2.61.0 +stripe==3.1.0 structlog==22.1.0 symbolic==12.7.0 toronado==0.1.0 diff --git a/requirements-getsentry.txt b/requirements-getsentry.txt index 20a275817c57c7..4c33ababea24db 100644 --- a/requirements-getsentry.txt +++ b/requirements-getsentry.txt @@ -11,4 +11,4 @@ PlanOut==0.6.0 pycountry==17.5.14 pyvat==1.3.15 reportlab==3.6.13 -stripe==2.61.0 +stripe==3.1.0 diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py b/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py index 17dfb054b1db9e..a1d508043456fd 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/installation/index.py @@ -1,4 +1,3 @@ -from django.utils.translation import gettext_lazy as _ from rest_framework import serializers from rest_framework.request import Request from rest_framework.response import Response @@ -6,6 +5,7 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import control_silo_endpoint from sentry.api.bases import SentryAppInstallationsBaseEndpoint +from sentry.api.fields.sentry_slug import SentrySlugField from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import serialize from sentry.constants import SENTRY_APP_SLUG_MAX_LENGTH @@ -14,21 +14,7 @@ class SentryAppInstallationsSerializer(serializers.Serializer): - slug = serializers.RegexField( - r"^[a-z0-9_\-]+$", - max_length=SENTRY_APP_SLUG_MAX_LENGTH, - error_messages={ - "invalid": _( - "Enter a valid slug consisting of lowercase letters, " - "numbers, underscores or hyphens." 
- ) - }, - ) - - def validate(self, attrs): - if not attrs.get("slug"): - raise serializers.ValidationError("Sentry App slug is required") - return attrs + slug = SentrySlugField(required=True, max_length=SENTRY_APP_SLUG_MAX_LENGTH) @control_silo_endpoint diff --git a/src/sentry/api/endpoints/user_notification_details.py b/src/sentry/api/endpoints/user_notification_details.py index 52e0c61cffbd22..6df31b737f96d6 100644 --- a/src/sentry/api/endpoints/user_notification_details.py +++ b/src/sentry/api/endpoints/user_notification_details.py @@ -7,22 +7,18 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import control_silo_endpoint from sentry.api.bases.user import UserEndpoint +from sentry.api.fields.empty_integer import EmptyIntegerField from sentry.api.serializers import Serializer, serialize +from sentry.models.notificationsetting import NotificationSetting from sentry.models.options.user_option import UserOption -from sentry.notifications.types import UserOptionsSettingsKey - -USER_OPTION_SETTINGS = { - UserOptionsSettingsKey.SELF_ACTIVITY: { - "key": "self_notifications", - "default": "0", - "type": bool, - }, - UserOptionsSettingsKey.SELF_ASSIGN: { - "key": "self_assign_issue", - "default": "0", - "type": bool, - }, -} +from sentry.notifications.types import NotificationScopeType, UserOptionsSettingsKey +from sentry.notifications.utils.legacy_mappings import ( + USER_OPTION_SETTINGS, + get_option_value_from_int, + get_type_from_user_option_settings_key, + map_notification_settings_to_legacy, +) +from sentry.types.integrations import ExternalProviders class UserNotificationsSerializer(Serializer): @@ -32,6 +28,19 @@ def get_attrs(self, item_list, user, *args, **kwargs): ).select_related("user") keys_to_user_option_objects = {user_option.key: user_option for user_option in user_options} + notification_settings = NotificationSetting.objects._filter( + ExternalProviders.EMAIL, + scope_type=NotificationScopeType.USER, + user_ids=[user.id for user in item_list], + ) + notification_settings_as_user_options = map_notification_settings_to_legacy( + notification_settings, user_mapping={user.id: user for user in item_list} + ) + + # Override deprecated UserOption rows with NotificationSettings. 
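+        # (Illustrative note: map_notification_settings_to_legacy returns UserOption-like
+        # clones whose `key` is a legacy string such as "workflow:notifications" or
+        # "subscribe_by_default", so they can replace stale rows by key in the dict below.)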
+ for user_option in notification_settings_as_user_options: + keys_to_user_option_objects[user_option.key] = user_option + results = defaultdict(list) for user_option in keys_to_user_option_objects.values(): results[user_option.user].append(user_option) @@ -48,13 +57,21 @@ def serialize(self, obj, attrs, user, *args, **kwargs): elif uo["type"] == int: data[key.value] = int(val) + data["weeklyReports"] = True # This cannot be overridden + return data -# only expose legacy options class UserNotificationDetailsSerializer(serializers.Serializer): + deployNotifications = EmptyIntegerField( + required=False, min_value=2, max_value=4, allow_null=True + ) personalActivityNotifications = serializers.BooleanField(required=False) selfAssignOnResolve = serializers.BooleanField(required=False) + subscribeByDefault = serializers.BooleanField(required=False) + workflowNotifications = EmptyIntegerField( + required=False, min_value=0, max_value=2, allow_null=True + ) @control_silo_endpoint @@ -83,12 +100,22 @@ def put(self, request: Request, user) -> Response: status=status.HTTP_400_BAD_REQUEST, ) - user_option, _ = UserOption.objects.get_or_create( - key=USER_OPTION_SETTINGS[key]["key"], - user=user, - project_id=None, - organization_id=None, - ) - user_option.update(value=str(int(value))) + type = get_type_from_user_option_settings_key(key) + if type: + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + type, + get_option_value_from_int(type, int(value)), + user_id=user.id, + ) + else: + # Legacy user options which does not include weekly report + user_option, _ = UserOption.objects.get_or_create( + key=USER_OPTION_SETTINGS[key]["key"], + user=user, + project_id=None, + organization_id=None, + ) + user_option.update(value=str(int(value))) return self.get(request, user) diff --git a/src/sentry/api/endpoints/user_notification_email.py b/src/sentry/api/endpoints/user_notification_email.py deleted file mode 100644 index 1b3f3d3e66fc26..00000000000000 --- a/src/sentry/api/endpoints/user_notification_email.py +++ /dev/null @@ -1,66 +0,0 @@ -from django.db import router, transaction -from rest_framework import status -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import control_silo_endpoint -from sentry.api.bases.user import UserEndpoint -from sentry.models.options.user_option import UserOption -from sentry.models.useremail import UserEmail - -INVALID_EMAIL_MSG = ( - "Invalid email value(s) provided. Email values must be verified emails for the given user." -) - - -@control_silo_endpoint -class UserNotificationEmailEndpoint(UserEndpoint): - publish_status = { - "GET": ApiPublishStatus.PRIVATE, - "PUT": ApiPublishStatus.PRIVATE, - } - owner = ApiOwner.ISSUES - - def get(self, request: Request, user) -> Response: - """ - Fetches the user's email notification settings. - Returns a dictionary where the keys are the IDs of the projects - and the values are the email addresses to be used for notifications for that project. - """ - email_options = UserOption.objects.filter( - key="mail:email", user=user, project_id__isnull=False - ).select_related("user") - - return self.respond({str(option.project_id): option.value for option in email_options}) - - def put(self, request: Request, user) -> Response: - """ - Updates the user's email notification settings. 
- The request data should be a dictionary where the keys are the IDs of the projects - and the values are the email addresses to be used for notifications for that project. - All email addresses must be verified and belong to the user. - If any email address is not verified or does not belong to the user, a 400 error is returned. - If the update is successful, a 204 status code is returned. - """ - data = request.data - - # Make sure target emails exist and are verified - emails_to_check = set(data.values()) - emails = UserEmail.objects.filter(user=user, email__in=emails_to_check, is_verified=True) - - # TODO(mgaeta): Is there a better way to check this? - if len(emails) != len(emails_to_check): - return Response({"detail": INVALID_EMAIL_MSG}, status=status.HTTP_400_BAD_REQUEST) - - with transaction.atomic(using=router.db_for_write(UserOption)): - for id, value in data.items(): - user_option, CREATED = UserOption.objects.get_or_create( - user=user, - key="mail:email", - project_id=int(id), - ) - user_option.update(value=str(value)) - - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/src/sentry/api/endpoints/user_notification_fine_tuning.py b/src/sentry/api/endpoints/user_notification_fine_tuning.py new file mode 100644 index 00000000000000..9d17e829d8d4b9 --- /dev/null +++ b/src/sentry/api/endpoints/user_notification_fine_tuning.py @@ -0,0 +1,208 @@ +from typing import Any, Mapping + +from django.db import router, transaction +from rest_framework import status +from rest_framework.request import Request +from rest_framework.response import Response + +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import control_silo_endpoint +from sentry.api.bases.user import UserEndpoint +from sentry.api.serializers import serialize +from sentry.api.serializers.models import UserNotificationsSerializer +from sentry.models.notificationsetting import NotificationSetting +from sentry.models.notificationsettingoption import NotificationSettingOption +from sentry.models.options.user_option import UserOption +from sentry.models.useremail import UserEmail +from sentry.notifications.types import ( + FineTuningAPIKey, + NotificationScopeEnum, + NotificationSettingEnum, + NotificationSettingsOptionEnum, +) +from sentry.notifications.utils.legacy_mappings import ( + get_option_value_from_int, + get_type_from_fine_tuning_key, +) +from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.integrations import ExternalProviders + +INVALID_EMAIL_MSG = ( + "Invalid email value(s) provided. Email values must be verified emails for the given user." +) +INVALID_USER_MSG = ( + "User does not belong to at least one of the requested organizations (org_id: %s)." +) + + +@control_silo_endpoint +class UserNotificationFineTuningEndpoint(UserEndpoint): + publish_status = { + "GET": ApiPublishStatus.UNKNOWN, + "PUT": ApiPublishStatus.UNKNOWN, + } + + def get(self, request: Request, user, notification_type) -> Response: + try: + notification_type = FineTuningAPIKey(notification_type) + except ValueError: + return Response( + {"detail": "Unknown notification type: %s." 
% notification_type}, + status=status.HTTP_404_NOT_FOUND, + ) + + notifications = UserNotificationsSerializer() + return Response( + serialize( + user, + request.user, + notifications, + notification_type=notification_type, + ) + ) + + def put(self, request: Request, user, notification_type) -> Response: + """ + Update user notification options + ```````````````````````````````` + + Updates user's notification options on a per project or organization + basis. Expected payload is a map/dict whose key is a project or org id + and value varies depending on `notification_type`. + + For `alerts`, `workflow`, `email` it expects a key of projectId + For `deploy` and `reports` it expects a key of organizationId + + For `alerts`, `workflow`, `deploy`, it expects a value of: + - "-1" = for "default" value (i.e. delete the option) + - "0" = disabled + - "1" = enabled + For `reports` it is only a boolean. + For `email` it is a verified email (string). + + :auth required: + :pparam string notification_type: One of: alerts, workflow, reports, deploy, email + :param map: Expects a map of id -> value (enabled or email) + """ + try: + notification_type = FineTuningAPIKey(notification_type) + except ValueError: + return Response( + {"detail": "Unknown notification type: %s." % notification_type}, + status=status.HTTP_404_NOT_FOUND, + ) + + if notification_type == FineTuningAPIKey.REPORTS: + return self._handle_put_reports(user, request.data) + + # Validate that all of the IDs are integers. + try: + for k in request.data.keys(): + int(k) + except ValueError: + return Response( + {"detail": "Invalid id value provided. Id values should be integers."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Make sure that the IDs we are going to update are a subset of the + # user's list of organizations or projects. + + if notification_type == FineTuningAPIKey.EMAIL: + return self._handle_put_emails(user, request.data) + + return self._handle_put_notification_settings(user, notification_type, request.data) + + @staticmethod + def _handle_put_reports(user, data): + user_option, _ = UserOption.objects.get_or_create( + user=user, + key="reports:disabled-organizations", + ) + + value = set(user_option.value or []) + + # The set of IDs of the organizations of which the user is a member. + org_ids = {o.id for o in user_service.get_organizations(user_id=user.id, only_visible=True)} + for org_id, enabled in data.items(): + org_id = int(org_id) + # We want "0" to be falsey + enabled = int(enabled) + + # make sure user is in org + if org_id not in org_ids: + return Response( + {"detail": INVALID_USER_MSG % org_id}, status=status.HTTP_403_FORBIDDEN + ) + + # The list contains organization IDs that should have reports + # DISABLED. If enabled, we need to check if org_id exists in list + # (because by default they will have reports enabled.) 
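+            # Illustrative example (hypothetical ids): with value = {2, 5} and a payload
+            # of {"2": 1, "7": 0}, org 2 is removed from the disabled set and org 7 is
+            # added, leaving {5, 7}.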
+ if enabled and org_id in value: + value.remove(org_id) + elif not enabled: + value.add(org_id) + + NotificationSettingOption.objects.create_or_update( + scope_type=NotificationScopeEnum.ORGANIZATION.value, + scope_identifier=org_id, + user_id=user.id, + type=NotificationSettingEnum.REPORTS.value, + values={ + "value": NotificationSettingsOptionEnum.ALWAYS.value + if enabled + else NotificationSettingsOptionEnum.NEVER.value, + }, + ) + + user_option.update(value=list(value)) + return Response(status=status.HTTP_204_NO_CONTENT) + + @staticmethod + def _handle_put_emails(user, data): + # Make sure target emails exist and are verified + emails_to_check = set(data.values()) + emails = UserEmail.objects.filter(user=user, email__in=emails_to_check, is_verified=True) + + # TODO(mgaeta): Is there a better way to check this? + if len(emails) != len(emails_to_check): + return Response({"detail": INVALID_EMAIL_MSG}, status=status.HTTP_400_BAD_REQUEST) + + with transaction.atomic(using=router.db_for_write(UserOption)): + for id, value in data.items(): + user_option, CREATED = UserOption.objects.get_or_create( + user=user, + key="mail:email", + project_id=int(id), + ) + user_option.update(value=str(value)) + + return Response(status=status.HTTP_204_NO_CONTENT) + + @staticmethod + def _handle_put_notification_settings( + user, notification_type: FineTuningAPIKey, data: Mapping[str, Any] + ): + with transaction.atomic(using=router.db_for_write(NotificationSetting)): + for setting_obj_id_str, value_str in data.items(): + setting_obj_id_int = int(setting_obj_id_str) + + # This partitioning always does the same thing because notification_type stays constant. + project_option, organization_option = ( + (None, setting_obj_id_int) + if notification_type == FineTuningAPIKey.DEPLOY + else (setting_obj_id_int, None) + ) + + type = get_type_from_fine_tuning_key(notification_type) + value_int = int(value_str) + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + type, + get_option_value_from_int(type, value_int), + user_id=user.id, + project=project_option, + organization=organization_option, + ) + + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/src/sentry/api/serializers/models/__init__.py b/src/sentry/api/serializers/models/__init__.py index 3b0164fbc9beea..a82c58123b78a8 100644 --- a/src/sentry/api/serializers/models/__init__.py +++ b/src/sentry/api/serializers/models/__init__.py @@ -82,6 +82,7 @@ from .team import * # noqa: F401,F403 from .user import * # noqa: F401,F403 from .user_identity_config import * # noqa: F401,F403 +from .user_notifications import * # noqa: F401,F403 from .user_social_auth import * # noqa: F401,F403 from .useremail import * # noqa: F401,F403 from .userip import * # noqa: F401,F403 diff --git a/src/sentry/api/serializers/models/user_notifications.py b/src/sentry/api/serializers/models/user_notifications.py new file mode 100644 index 00000000000000..cf9a2e3c737722 --- /dev/null +++ b/src/sentry/api/serializers/models/user_notifications.py @@ -0,0 +1,65 @@ +from collections import defaultdict +from typing import Iterable + +from sentry.api.serializers import Serializer +from sentry.models.notificationsetting import NotificationSetting +from sentry.models.options.user_option import UserOption +from sentry.notifications.types import FineTuningAPIKey, NotificationScopeType +from sentry.notifications.utils.legacy_mappings import ( + get_type_from_fine_tuning_key, + map_notification_settings_to_legacy, +) +from sentry.types.integrations import 
ExternalProviders + + +def handle_legacy(notification_type: FineTuningAPIKey, users: Iterable) -> Iterable: + """For EMAIL and REPORTS, check UserOptions.""" + filter_args = {} + if notification_type == FineTuningAPIKey.EMAIL: + filter_args["project_id__isnull"] = False + + key = { + FineTuningAPIKey.EMAIL: "mail:email", + FineTuningAPIKey.REPORTS: "reports:disabled-organizations", + }.get(notification_type) + + return UserOption.objects.filter(key=key, user__in=users, **filter_args).select_related("user") + + +class UserNotificationsSerializer(Serializer): + def get_attrs(self, item_list, user, **kwargs): + notification_type = kwargs["notification_type"] + type = get_type_from_fine_tuning_key(notification_type) + if not type: + data = handle_legacy(notification_type, item_list) + else: + user_mapping = {user.id: user for user in item_list} + notifications_settings = NotificationSetting.objects._filter( + ExternalProviders.EMAIL, + get_type_from_fine_tuning_key(notification_type), + user_ids=list(user_mapping.keys()), + ).exclude(scope_type=NotificationScopeType.USER.value) + data = map_notification_settings_to_legacy(notifications_settings, user_mapping) + + results = defaultdict(list) + for uo in data: + results[uo.user].append(uo) + + return results + + def serialize(self, obj, attrs, user, **kwargs): + notification_type = kwargs["notification_type"] + data = {} + + for uo in attrs: + if notification_type == FineTuningAPIKey.REPORTS: + # UserOption for key=reports:disabled-organizations saves a list of orgIds + # that should not receive reports + # This UserOption should have both project + organization = None + for org_id in uo.value: + data[org_id] = "0" + elif uo.project_id is not None: + data[uo.project_id] = str(uo.value) + elif uo.organization_id is not None: + data[uo.organization_id] = str(uo.value) + return data diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 568f7b5710c238..419aafc12574a0 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -583,7 +583,7 @@ from .endpoints.user_index import UserIndexEndpoint from .endpoints.user_ips import UserIPsEndpoint from .endpoints.user_notification_details import UserNotificationDetailsEndpoint -from .endpoints.user_notification_email import UserNotificationEmailEndpoint +from .endpoints.user_notification_fine_tuning import UserNotificationFineTuningEndpoint from .endpoints.user_notification_settings_options import UserNotificationSettingsOptionsEndpoint from .endpoints.user_notification_settings_options_detail import ( UserNotificationSettingsOptionsDetailEndpoint, @@ -876,9 +876,9 @@ name="sentry-api-0-user-notifications", ), re_path( - r"^(?P[^\/]+)/notifications/email/$", - UserNotificationEmailEndpoint.as_view(), - name="sentry-api-0-user-notifications-email", + r"^(?P[^\/]+)/notifications/(?P[^\/]+)/$", + UserNotificationFineTuningEndpoint.as_view(), + name="sentry-api-0-user-notifications-fine-tuning", ), re_path( r"^(?P[^\/]+)/notification-options/$", diff --git a/src/sentry/notifications/types.py b/src/sentry/notifications/types.py index 81330b5b61cc4a..0c390314f3e7d4 100644 --- a/src/sentry/notifications/types.py +++ b/src/sentry/notifications/types.py @@ -205,8 +205,15 @@ class FineTuningAPIKey(Enum): class UserOptionsSettingsKey(Enum): + DEPLOY = "deployNotifications" SELF_ACTIVITY = "personalActivityNotifications" SELF_ASSIGN = "selfAssignOnResolve" + SUBSCRIBE_BY_DEFAULT = "subscribeByDefault" + WORKFLOW = "workflowNotifications" + ACTIVE_RELEASE = "activeReleaseNotifications" + APPROVAL = 
"approvalNotifications" + QUOTA = "quotaNotifications" + SPIKE_PROTECTION = "spikeProtectionNotifications" VALID_VALUES_FOR_KEY = { diff --git a/src/sentry/notifications/utils/__init__.py b/src/sentry/notifications/utils/__init__.py index e6fb36a96d4f08..30d942b289605d 100644 --- a/src/sentry/notifications/utils/__init__.py +++ b/src/sentry/notifications/utils/__init__.py @@ -47,6 +47,7 @@ from sentry.models.releasecommit import ReleaseCommit from sentry.models.repository import Repository from sentry.models.rule import Rule +from sentry.notifications.notify import notify from sentry.services.hybrid_cloud.integration import integration_service from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.util import region_silo_function @@ -420,8 +421,6 @@ def get_notification_group_title( def send_activity_notification(notification: ActivityNotification | UserReportNotification) -> None: - from sentry.notifications.notify import notify - participants_by_provider = notification.get_participants_with_group_subscription_reason() if participants_by_provider.is_empty(): return diff --git a/src/sentry/notifications/utils/legacy_mappings.py b/src/sentry/notifications/utils/legacy_mappings.py new file mode 100644 index 00000000000000..f96ba8efceab1d --- /dev/null +++ b/src/sentry/notifications/utils/legacy_mappings.py @@ -0,0 +1,224 @@ +from __future__ import annotations + +from collections import namedtuple +from typing import Any, Iterable, Mapping, Optional, Union + +from sentry.models.user import User +from sentry.notifications.types import ( + FineTuningAPIKey, + NotificationScopeType, + NotificationSettingOptionValues, + NotificationSettingTypes, + UserOptionsSettingsKey, +) +from sentry.services.hybrid_cloud.user.model import RpcUser + +LegacyUserOptionClone = namedtuple( + "LegacyUserOptionClone", + [ + "user", + "project_id", + "organization_id", + "key", + "value", + ], +) + +USER_OPTION_SETTINGS = { + UserOptionsSettingsKey.DEPLOY: { + "key": "deploy-emails", + "default": "3", + "type": int, + }, + UserOptionsSettingsKey.SELF_ACTIVITY: { + "key": "self_notifications", + "default": "0", + "type": bool, + }, + UserOptionsSettingsKey.SELF_ASSIGN: { + "key": "self_assign_issue", + "default": "0", + "type": bool, + }, + UserOptionsSettingsKey.SUBSCRIBE_BY_DEFAULT: { + "key": "subscribe_by_default", + "default": "1", + "type": bool, + }, + UserOptionsSettingsKey.WORKFLOW: { + "key": "workflow:notifications", + "default": "1", + "type": int, + }, +} + +KEYS_TO_LEGACY_KEYS = { + NotificationSettingTypes.DEPLOY: "deploy-emails", + NotificationSettingTypes.ISSUE_ALERTS: "mail:alert", + NotificationSettingTypes.WORKFLOW: "workflow:notifications", +} + + +KEY_VALUE_TO_LEGACY_VALUE = { + NotificationSettingTypes.DEPLOY: { + NotificationSettingOptionValues.ALWAYS: 2, + NotificationSettingOptionValues.COMMITTED_ONLY: 3, + NotificationSettingOptionValues.NEVER: 4, + }, + NotificationSettingTypes.ISSUE_ALERTS: { + NotificationSettingOptionValues.ALWAYS: 1, + NotificationSettingOptionValues.NEVER: 0, + }, + NotificationSettingTypes.WORKFLOW: { + NotificationSettingOptionValues.ALWAYS: 0, + NotificationSettingOptionValues.SUBSCRIBE_ONLY: 1, + NotificationSettingOptionValues.NEVER: 2, + }, +} + +LEGACY_VALUE_TO_KEY = { + NotificationSettingTypes.DEPLOY: { + -1: NotificationSettingOptionValues.DEFAULT, + 2: NotificationSettingOptionValues.ALWAYS, + 3: NotificationSettingOptionValues.COMMITTED_ONLY, + 4: NotificationSettingOptionValues.NEVER, + }, + 
NotificationSettingTypes.ISSUE_ALERTS: { + -1: NotificationSettingOptionValues.DEFAULT, + 0: NotificationSettingOptionValues.NEVER, + 1: NotificationSettingOptionValues.ALWAYS, + }, + NotificationSettingTypes.WORKFLOW: { + -1: NotificationSettingOptionValues.DEFAULT, + 0: NotificationSettingOptionValues.ALWAYS, + 1: NotificationSettingOptionValues.SUBSCRIBE_ONLY, + 2: NotificationSettingOptionValues.NEVER, + }, +} + + +def get_legacy_key(type: NotificationSettingTypes, scope_type: NotificationScopeType) -> str | None: + """Temporary mapping from new enum types to legacy strings.""" + if scope_type == NotificationScopeType.USER and type == NotificationSettingTypes.ISSUE_ALERTS: + return "subscribe_by_default" + + return KEYS_TO_LEGACY_KEYS.get(type) + + +def get_legacy_value(type: NotificationSettingTypes, value: NotificationSettingOptionValues) -> str: + """ + Temporary mapping from new enum types to legacy strings. Each type has a separate mapping. + """ + + return str(KEY_VALUE_TO_LEGACY_VALUE.get(type, {}).get(value)) + + +def get_option_value_from_boolean(value: bool) -> NotificationSettingOptionValues: + if value: + return NotificationSettingOptionValues.ALWAYS + else: + return NotificationSettingOptionValues.NEVER + + +def get_option_value_from_int( + type: NotificationSettingTypes, value: int +) -> NotificationSettingOptionValues | None: + return LEGACY_VALUE_TO_KEY.get(type, {}).get(value) + + +def get_type_from_fine_tuning_key(key: FineTuningAPIKey) -> NotificationSettingTypes | None: + return { + FineTuningAPIKey.ALERTS: NotificationSettingTypes.ISSUE_ALERTS, + FineTuningAPIKey.DEPLOY: NotificationSettingTypes.DEPLOY, + FineTuningAPIKey.WORKFLOW: NotificationSettingTypes.WORKFLOW, + }.get(key) + + +def get_type_from_user_option_settings_key( + key: UserOptionsSettingsKey, +) -> NotificationSettingTypes | None: + return { + UserOptionsSettingsKey.DEPLOY: NotificationSettingTypes.DEPLOY, + UserOptionsSettingsKey.WORKFLOW: NotificationSettingTypes.WORKFLOW, + UserOptionsSettingsKey.SUBSCRIBE_BY_DEFAULT: NotificationSettingTypes.ISSUE_ALERTS, + }.get(key) + + +def get_key_from_legacy(key: str) -> NotificationSettingTypes | None: + return { + "deploy-emails": NotificationSettingTypes.DEPLOY, + "mail:alert": NotificationSettingTypes.ISSUE_ALERTS, + "subscribe_by_default": NotificationSettingTypes.ISSUE_ALERTS, + "workflow:notifications": NotificationSettingTypes.WORKFLOW, + }.get(key) + + +def get_key_value_from_legacy( + key: str, value: Any +) -> tuple[NotificationSettingTypes | None, NotificationSettingOptionValues | None]: + type = get_key_from_legacy(key) + if type not in LEGACY_VALUE_TO_KEY: + return None, None + option_value = LEGACY_VALUE_TO_KEY.get(type, {}).get(int(value)) + + return type, option_value + + +def get_legacy_object( + notification_setting: Any, + user_mapping: Optional[Mapping[int, Union[User, RpcUser]]] = None, +) -> Any: + type = NotificationSettingTypes(notification_setting.type) + value = NotificationSettingOptionValues(notification_setting.value) + scope_type = NotificationScopeType(notification_setting.scope_type) + key = get_legacy_key(type, scope_type) + + data = { + "key": key, + "value": get_legacy_value(type, value), + "user": user_mapping.get(notification_setting.user_id) if user_mapping else None, + "project_id": None, + "organization_id": None, + } + + if scope_type == NotificationScopeType.PROJECT: + data["project_id"] = notification_setting.scope_identifier + if scope_type == NotificationScopeType.ORGANIZATION: + data["organization_id"] = 
notification_setting.scope_identifier + + return LegacyUserOptionClone(**data) + + +def map_notification_settings_to_legacy( + notification_settings: Iterable[Any], + user_mapping: Mapping[int, Union[User, RpcUser]], +) -> list[Any]: + """A hack for legacy serializers. Pretend a list of NotificationSettings is a list of UserOptions.""" + return [ + get_legacy_object(notification_setting, user_mapping) + for notification_setting in notification_settings + ] + + +def get_parent_mappings( + notification_settings: Iterable[Any], +) -> tuple[Mapping[int, Any], Mapping[int, Any]]: + """Prefetch a list of Project or Organization objects for the Serializer.""" + from sentry.models.organization import Organization + from sentry.models.project import Project + + project_ids = [] + organization_ids = [] + for notification_setting in notification_settings: + if notification_setting.scope_type == NotificationScopeType.PROJECT.value: + project_ids.append(notification_setting.scope_identifier) + if notification_setting.scope_type == NotificationScopeType.ORGANIZATION.value: + organization_ids.append(notification_setting.scope_identifier) + + projects = Project.objects.filter(id__in=project_ids) + organizations = Organization.objects.filter(id__in=organization_ids) + + project_mapping = {project.id: project for project in projects} + organization_mapping = {organization.id: organization for organization in organizations} + + return project_mapping, organization_mapping diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index f45488a79b348f..9b34d21f76ed4f 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -269,6 +269,17 @@ class ThresholdDict(TypedDict): MEASUREMENTS_FRAMES_FROZEN_RATE: "d:transactions/measurements.frames_frozen_rate@ratio", MEASUREMENTS_FRAMES_SLOW_RATE: "d:transactions/measurements.frames_slow_rate@ratio", MEASUREMENTS_STALL_PERCENTAGE: "d:transactions/measurements.stall_percentage@ratio", + "measurements.score.lcp": "d:transactions/measurements.score.lcp@ratio", + "measurements.score.fid": "d:transactions/measurements.score.fid@ratio", + "measurements.score.cls": "d:transactions/measurements.score.cls@ratio", + "measurements.score.fcp": "d:transactions/measurements.score.fcp@ratio", + "measurements.score.ttfb": "d:transactions/measurements.score.ttfb@ratio", + "measurements.score.total": "d:transactions/measurements.score.total@ratio", + "measurements.score.weight.lcp": "d:transactions/measurements.score.weight.lcp@ratio", + "measurements.score.weight.fid": "d:transactions/measurements.score.weight.fid@ratio", + "measurements.score.weight.cls": "d:transactions/measurements.score.weight.cls@ratio", + "measurements.score.weight.fcp": "d:transactions/measurements.score.weight.fcp@ratio", + "measurements.score.weight.ttfb": "d:transactions/measurements.score.weight.ttfb@ratio", "spans.browser": "d:transactions/breakdowns.span_ops.ops.browser@millisecond", "spans.db": "d:transactions/breakdowns.span_ops.ops.db@millisecond", "spans.http": "d:transactions/breakdowns.span_ops.ops.http@millisecond", diff --git a/src/sentry/search/events/datasets/metrics.py b/src/sentry/search/events/datasets/metrics.py index 0f9ccd0dc53f06..f69379974a7afa 100644 --- a/src/sentry/search/events/datasets/metrics.py +++ b/src/sentry/search/events/datasets/metrics.py @@ -581,6 +581,25 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: snql_distribution=self._resolve_web_vital_function, 
default_result_type="integer", ), + fields.MetricsFunction( + "performance_score", + required_args=[ + fields.MetricArg( + "column", + allowed_columns=[ + "measurements.score.fcp", + "measurements.score.lcp", + "measurements.score.fid", + "measurements.score.cls", + "measurements.score.ttfb", + ], + allow_custom_measurements=False, + ) + ], + calculated_args=[resolve_metric_id], + snql_distribution=self._resolve_web_vital_score_function, + default_result_type="integer", + ), fields.MetricsFunction( "epm", snql_distribution=self._resolve_epm, @@ -1232,6 +1251,46 @@ def _resolve_web_vital_function( alias, ) + def _resolve_web_vital_score_function( + self, + args: Mapping[str, Union[str, Column, SelectType, int, float]], + alias: str, + ) -> SelectType: + column = args["column"] + metric_id = args["metric_id"] + + if column not in [ + "measurements.score.lcp", + "measurements.score.fcp", + "measurements.score.fid", + "measurements.score.cls", + "measurements.score.ttfb", + ]: + raise InvalidSearchQuery("performance_score only supports measurements") + + weight_metric_id = self.resolve_metric(column.replace("score", "score.weight")) + + return Function( + "divide", + [ + Function( + "sumIf", + [ + Column("value"), + Function("equals", [Column("metric_id"), metric_id]), + ], + ), + Function( + "sumIf", + [ + Column("value"), + Function("equals", [Column("metric_id"), weight_metric_id]), + ], + ), + ], + alias, + ) + def _resolve_total_transaction_duration(self, alias: str, scope: str) -> SelectType: """This calculates the total time, and based on the scope will return either the apps total time or whatever other local scope/filters are diff --git a/src/sentry/tasks/integrations/github/open_pr_comment.py b/src/sentry/tasks/integrations/github/open_pr_comment.py index 9baf5812ef6225..500c372b9709d7 100644 --- a/src/sentry/tasks/integrations/github/open_pr_comment.py +++ b/src/sentry/tasks/integrations/github/open_pr_comment.py @@ -1,5 +1,6 @@ from __future__ import annotations +import itertools import logging from datetime import datetime, timedelta from typing import Any, Dict, List, Set, Tuple @@ -12,16 +13,23 @@ from sentry.integrations.github.client import GitHubAppsClient from sentry.models.group import Group from sentry.models.integrations.repository_project_path_config import RepositoryProjectPathConfig +from sentry.models.options.organization_option import OrganizationOption +from sentry.models.organization import Organization from sentry.models.project import Project -from sentry.models.pullrequest import PullRequest +from sentry.models.pullrequest import CommentType, PullRequest from sentry.models.repository import Repository +from sentry.services.hybrid_cloud.integration import integration_service from sentry.shared_integrations.exceptions.base import ApiError +from sentry.silo.base import SiloMode from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer +from sentry.tasks.base import instrumented_task from sentry.tasks.integrations.github.pr_comment import ( + ISSUE_LOCKED_ERROR_MESSAGE, RATE_LIMITED_MESSAGE, GithubAPIErrorType, PullRequestIssue, + create_or_update_comment, format_comment_url, ) from sentry.templatetags.sentry_helpers import small_count @@ -31,7 +39,7 @@ logger = logging.getLogger(__name__) -OPEN_PR_METRIC_BASE = "github_open_pr_comment.{key}" +OPEN_PR_METRICS_BASE = "github_open_pr_comment.{key}" # Caps the number of files that can be modified in a PR to leave a comment OPEN_PR_MAX_FILES_CHANGED = 7 @@ -101,11 +109,12 @@ def 
format_issue_table(diff_filename: str, issues: List[PullRequestIssue], toggl def get_issue_table_contents(issue_list: List[Dict[str, int]]) -> List[PullRequestIssue]: group_id_to_info = {} for issue in issue_list: - group_id = issue.pop("group_id") - group_id_to_info[group_id] = issue + group_id = issue["group_id"] + group_id_to_info[group_id] = dict(filter(lambda k: k[0] != "group_id", issue.items())) issues = Group.objects.filter(id__in=list(group_id_to_info.keys())).all() - return [ + + pull_request_issues = [ PullRequestIssue( title=issue.title, subtitle=issue.culprit, @@ -116,6 +125,9 @@ def get_issue_table_contents(issue_list: List[Dict[str, int]]) -> List[PullReque ) for issue in issues ] + pull_request_issues.sort(key=lambda k: k.event_count or 0, reverse=True) + + return pull_request_issues # TODO(cathy): Change the client typing to allow for multiple SCM Integrations @@ -129,17 +141,17 @@ def safe_for_comment( except ApiError as e: if e.json and RATE_LIMITED_MESSAGE in e.json.get("message", ""): metrics.incr( - OPEN_PR_METRIC_BASE.format(key="api_error"), + OPEN_PR_METRICS_BASE.format(key="api_error"), tags={"type": GithubAPIErrorType.RATE_LIMITED.value, "code": e.code}, ) elif e.code == 404: metrics.incr( - OPEN_PR_METRIC_BASE.format(key="api_error"), + OPEN_PR_METRICS_BASE.format(key="api_error"), tags={"type": GithubAPIErrorType.MISSING_PULL_REQUEST.value, "code": e.code}, ) else: metrics.incr( - OPEN_PR_METRIC_BASE.format(key="api_error"), + OPEN_PR_METRICS_BASE.format(key="api_error"), tags={"type": GithubAPIErrorType.UNKNOWN.value, "code": e.code}, ) logger.exception("github.open_pr_comment.unknown_api_error", extra={"error": str(e)}) @@ -148,17 +160,17 @@ def safe_for_comment( safe_to_comment = True if pullrequest_resp["state"] != "open": metrics.incr( - OPEN_PR_METRIC_BASE.format(key="rejected_comment"), tags={"reason": "incorrect_state"} + OPEN_PR_METRICS_BASE.format(key="rejected_comment"), tags={"reason": "incorrect_state"} ) safe_to_comment = False if pullrequest_resp["changed_files"] > OPEN_PR_MAX_FILES_CHANGED: metrics.incr( - OPEN_PR_METRIC_BASE.format(key="rejected_comment"), tags={"reason": "too_many_files"} + OPEN_PR_METRICS_BASE.format(key="rejected_comment"), tags={"reason": "too_many_files"} ) safe_to_comment = False if pullrequest_resp["additions"] + pullrequest_resp["deletions"] > OPEN_PR_MAX_LINES_CHANGED: metrics.incr( - OPEN_PR_METRIC_BASE.format(key="rejected_comment"), tags={"reason": "too_many_lines"} + OPEN_PR_METRICS_BASE.format(key="rejected_comment"), tags={"reason": "too_many_lines"} ) safe_to_comment = False return safe_to_comment @@ -230,6 +242,7 @@ def get_top_5_issues_by_count_for_file( Condition(Column("group_id"), Op.IN, group_ids), Condition(Column("timestamp"), Op.GTE, datetime.now() - timedelta(days=14)), Condition(Column("timestamp"), Op.LT, datetime.now()), + # NOTE: this currently looks at the top frame of the stack trace (old suspect commit logic) Condition( Function("arrayElement", (Column("exception_frames.filename"), -1)), Op.IN, @@ -242,3 +255,141 @@ def get_top_5_issues_by_count_for_file( ), ) return raw_snql_query(request, referrer=Referrer.GITHUB_PR_COMMENT_BOT.value)["data"] + + +@instrumented_task( + name="sentry.tasks.integrations.open_pr_comment_workflow", silo_mode=SiloMode.REGION +) +def open_pr_comment_workflow(pr_id: int) -> None: + # CHECKS + # check PR exists to get PR key + try: + pull_request = PullRequest.objects.get(id=pr_id) + except PullRequest.DoesNotExist: + 
metrics.incr(OPEN_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_pr"}) + return + + # check org option + org_id = pull_request.organization_id + try: + organization = Organization.objects.get_from_cache(id=org_id) + except Organization.DoesNotExist: + logger.error("github.open_pr_comment.org_missing") + metrics.incr(OPEN_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_org"}) + return + + if not OrganizationOption.objects.get_value( + organization=organization, + key="sentry:github_open_pr_bot", + default=True, + ): + logger.info("github.open_pr_comment.option_missing", extra={"organization_id": org_id}) + return + + # check PR repo exists to get repo name + try: + repo = Repository.objects.get(id=pull_request.repository_id) + except Repository.DoesNotExist: + logger.info("github.open_pr_comment.repo_missing", extra={"organization_id": org_id}) + metrics.incr(OPEN_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_repo"}) + return + + # check integration exists to hit Github API with client + integration = integration_service.get_integration(integration_id=repo.integration_id) + if not integration: + logger.info("github.open_pr_comment.integration_missing", extra={"organization_id": org_id}) + metrics.incr(OPEN_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_integration"}) + return + + installation = integration.get_installation(organization_id=org_id) + + client = installation.get_client() + + # CREATING THE COMMENT + if not safe_for_comment(gh_client=client, repository=repo, pull_request=pull_request): + metrics.incr( + OPEN_PR_METRICS_BASE.format(key="error"), + tags={"type": "unsafe_for_comment"}, + ) + return + + pr_filenames = get_pr_filenames(gh_client=client, repository=repo, pull_request=pull_request) + + issue_table_contents = {} + top_issues_per_file = [] + + # fetch issues related to the files + for pr_filename in pr_filenames: + projects, sentry_filenames = get_projects_and_filenames_from_source_file( + org_id, pr_filename + ) + if not len(projects) or not len(sentry_filenames): + continue + + top_issues = get_top_5_issues_by_count_for_file(list(projects), list(sentry_filenames)) + if not len(top_issues): + continue + + top_issues_per_file.append(top_issues) + + issue_table_contents[pr_filename] = get_issue_table_contents(top_issues) + + if not len(issue_table_contents): + # don't leave a comment if no issues for files in PR + metrics.incr(OPEN_PR_METRICS_BASE.format(key="no_issues")) + return + + # format issues per file into comment + issue_tables = [] + first_table = True + for pr_filename in pr_filenames: + issue_table_content = issue_table_contents.get(pr_filename, None) + + if issue_table_content is None: + continue + + if first_table: + issue_table = format_issue_table(pr_filename, issue_table_content) + first_table = False + else: + # toggle all tables but the first one + issue_table = format_issue_table(pr_filename, issue_table_content, toggle=True) + + issue_tables.append(issue_table) + + comment_body = format_open_pr_comment(issue_tables) + + # list all issues in the comment + issue_list: List[Dict[str, Any]] = list(itertools.chain.from_iterable(top_issues_per_file)) + issue_id_list: List[int] = [issue["group_id"] for issue in issue_list] + + try: + create_or_update_comment( + pr_comment=None, + client=client, + repo=repo, + pr_key=pull_request.key, + comment_body=comment_body, + pullrequest_id=pull_request.id, + issue_list=issue_id_list, + comment_type=CommentType.OPEN_PR, + metrics_base=OPEN_PR_METRICS_BASE, + ) + 
except ApiError as e: + if e.json: + if ISSUE_LOCKED_ERROR_MESSAGE in e.json.get("message", ""): + metrics.incr( + OPEN_PR_METRICS_BASE.format(key="error"), + tags={"type": "issue_locked_error"}, + ) + return + + elif RATE_LIMITED_MESSAGE in e.json.get("message", ""): + metrics.incr( + OPEN_PR_METRICS_BASE.format(key="error"), + tags={"type": "rate_limited_error"}, + ) + return + + metrics.incr(OPEN_PR_METRICS_BASE.format(key="error"), tags={"type": "api_error"}) + raise e diff --git a/src/sentry/tasks/integrations/github/pr_comment.py b/src/sentry/tasks/integrations/github/pr_comment.py index ba8874988b7767..acebcd2e235f6a 100644 --- a/src/sentry/tasks/integrations/github/pr_comment.py +++ b/src/sentry/tasks/integrations/github/pr_comment.py @@ -35,7 +35,7 @@ logger = logging.getLogger(__name__) -METRICS_BASE = "github_pr_comment.{key}" +MERGED_PR_METRICS_BASE = "github_pr_comment.{key}" @dataclass @@ -155,7 +155,8 @@ def create_or_update_comment( comment_body: str, pullrequest_id: int, issue_list: List[int], - comment_type: CommentType = CommentType.MERGED_PR, + comment_type: int = CommentType.MERGED_PR, + metrics_base=MERGED_PR_METRICS_BASE, ): # client will raise ApiError if the request is not successful if pr_comment is None: @@ -170,12 +171,12 @@ def create_or_update_comment( group_ids=issue_list, comment_type=comment_type, ) - metrics.incr(METRICS_BASE.format(key="comment_created")) + metrics.incr(metrics_base.format(key="comment_created")) else: resp = client.update_comment( repo=repo.name, comment_id=pr_comment.external_id, data={"body": comment_body} ) - metrics.incr(METRICS_BASE.format(key="comment_updated")) + metrics.incr(metrics_base.format(key="comment_updated")) pr_comment.updated_at = timezone.now() pr_comment.group_ids = issue_list pr_comment.save() @@ -200,8 +201,8 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): organization = Organization.objects.get_from_cache(id=org_id) except Organization.DoesNotExist: cache.delete(cache_key) - logger.error("github.pr_comment.org_missing") - metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_org"}) + logger.info("github.pr_comment.org_missing") + metrics.incr(MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_org"}) return if not OrganizationOption.objects.get_value( @@ -209,7 +210,7 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): key="sentry:github_pr_bot", default=True, ): - logger.error("github.pr_comment.option_missing", extra={"organization_id": org_id}) + logger.info("github.pr_comment.option_missing", extra={"organization_id": org_id}) return pr_comment = None @@ -223,8 +224,8 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): project = Project.objects.get_from_cache(id=project_id) except Project.DoesNotExist: cache.delete(cache_key) - logger.error("github.pr_comment.project_missing", extra={"organization_id": org_id}) - metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_project"}) + logger.info("github.pr_comment.project_missing", extra={"organization_id": org_id}) + metrics.incr(MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_project"}) return top_5_issues = get_top_5_issues_by_count(issue_list, project) @@ -235,15 +236,17 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): repo = Repository.objects.get(id=gh_repo_id) except Repository.DoesNotExist: cache.delete(cache_key) - logger.error("github.pr_comment.repo_missing", extra={"organization_id": org_id}) - 
metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_repo"}) + logger.info("github.pr_comment.repo_missing", extra={"organization_id": org_id}) + metrics.incr(MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_repo"}) return integration = integration_service.get_integration(integration_id=repo.integration_id) if not integration: cache.delete(cache_key) - logger.error("github.pr_comment.integration_missing", extra={"organization_id": org_id}) - metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_integration"}) + logger.info("github.pr_comment.integration_missing", extra={"organization_id": org_id}) + metrics.incr( + MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "missing_integration"} + ) return installation = integration.get_installation(organization_id=org_id) @@ -272,14 +275,18 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): if e.json: if ISSUE_LOCKED_ERROR_MESSAGE in e.json.get("message", ""): - metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "issue_locked_error"}) + metrics.incr( + MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "issue_locked_error"} + ) return elif RATE_LIMITED_MESSAGE in e.json.get("message", ""): - metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "rate_limited_error"}) + metrics.incr( + MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "rate_limited_error"} + ) return - metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "api_error"}) + metrics.incr(MERGED_PR_METRICS_BASE.format(key="error"), tags={"type": "api_error"}) raise e @@ -303,7 +310,7 @@ def github_comment_reactions(): integration = integration_service.get_integration(integration_id=repo.integration_id) if not integration: - logger.error( + logger.info( "github.pr_comment.comment_reactions.integration_missing", extra={"organization_id": pr.organization_id}, ) diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index d4b4ed4c0915b5..d7695dbce2f443 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -1802,6 +1802,17 @@ class MetricsEnhancedPerformanceTestCase(BaseMetricsLayerTestCase, TestCase): "measurements.cls": "metrics_distributions", "measurements.frames_frozen_rate": "metrics_distributions", "measurements.time_to_initial_display": "metrics_distributions", + "measurements.score.lcp": "metrics_distributions", + "measurements.score.fcp": "metrics_distributions", + "measurements.score.fid": "metrics_distributions", + "measurements.score.cls": "metrics_distributions", + "measurements.score.ttfb": "metrics_distributions", + "measurements.score.total": "metrics_distributions", + "measurements.score.weight.lcp": "metrics_distributions", + "measurements.score.weight.fcp": "metrics_distributions", + "measurements.score.weight.fid": "metrics_distributions", + "measurements.score.weight.cls": "metrics_distributions", + "measurements.score.weight.ttfb": "metrics_distributions", "spans.http": "metrics_distributions", "user": "metrics_sets", } diff --git a/src/sentry/testutils/helpers/slack.py b/src/sentry/testutils/helpers/slack.py index 346bf2b6660cf5..906e2b88969f3d 100644 --- a/src/sentry/testutils/helpers/slack.py +++ b/src/sentry/testutils/helpers/slack.py @@ -4,6 +4,7 @@ import responses from sentry.integrations.slack.message_builder import SlackBody +from sentry.integrations.slack.notifications import send_notification_as_slack from sentry.models.identity import Identity, IdentityProvider, IdentityStatus from 
sentry.models.integrations.external_actor import ExternalActor from sentry.models.integrations.integration import Integration @@ -91,6 +92,11 @@ def link_team(team: Team, integration: Integration, channel_name: str, channel_i ) +def send_notification(provider, *args_list): + if provider == ExternalProviders.SLACK: + send_notification_as_slack(*args_list, {}) + + def get_channel(index=0): """Get the channel ID the Slack message went to""" assert len(responses.calls) >= 1 diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx index a9a612a653acab..ca246dcb12e676 100644 --- a/static/app/components/events/eventReplay/replayPreview.tsx +++ b/static/app/components/events/eventReplay/replayPreview.tsx @@ -26,15 +26,15 @@ type Props = { orgSlug: string; replaySlug: string; buttonProps?: Partial>; - fromFeedback?: boolean; + focusTab?: TabKey; }; function ReplayPreview({ + buttonProps, + eventTimestampMs, + focusTab, orgSlug, replaySlug, - eventTimestampMs, - buttonProps, - fromFeedback, }: Props) { const routes = useRoutes(); const {fetching, replay, replayRecord, fetchError, replayId} = useReplayReader({ @@ -122,7 +122,7 @@ function ReplayPreview({ pathname: normalizeUrl(`/organizations/${orgSlug}/replays/${replayId}/`), query: { referrer: getRouteStringFromRoutes(routes), - t_main: fromFeedback ? TabKey.BREADCRUMBS : TabKey.ERRORS, + t_main: focusTab ?? TabKey.ERRORS, t: initialTimeOffsetMs / 1000, }, }; diff --git a/static/app/components/feedback/feedbackItem/replaySection.tsx b/static/app/components/feedback/feedbackItem/replaySection.tsx index 31772fe931cb5e..a33027fc89ecc5 100644 --- a/static/app/components/feedback/feedbackItem/replaySection.tsx +++ b/static/app/components/feedback/feedbackItem/replaySection.tsx @@ -7,6 +7,7 @@ import ReplayIdCountProvider from 'sentry/components/replays/replayIdCountProvid import {IconPlay} from 'sentry/icons'; import {t} from 'sentry/locale'; import {Organization} from 'sentry/types'; +import {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; interface Props { eventTimestampMs: number; @@ -26,10 +27,10 @@ export default function ReplaySection({eventTimestampMs, organization, replayId} { diff --git a/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx b/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx index f44f9aac17658c..0e7ce5d36daf40 100644 --- a/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx @@ -7,7 +7,7 @@ import {t} from 'sentry/locale'; import {CanvasView} from 'sentry/utils/profiling/canvasView'; import {toRGBAString} from 'sentry/utils/profiling/colors/utils'; import {FlamegraphCanvas} from 'sentry/utils/profiling/flamegraphCanvas'; -import {UIFramesRenderer} from 'sentry/utils/profiling/renderers/uiFramesRenderer'; +import {UIFramesRenderer} from 'sentry/utils/profiling/renderers/UIFramesRenderer'; import {Rect} from 'sentry/utils/profiling/speedscope'; import {UIFrames} from 'sentry/utils/profiling/uiFrames'; diff --git a/static/app/data/controlsiloUrlPatterns.ts b/static/app/data/controlsiloUrlPatterns.ts index 3f594b447f86f1..6bdc1504f59cce 100644 --- a/static/app/data/controlsiloUrlPatterns.ts +++ b/static/app/data/controlsiloUrlPatterns.ts @@ -59,7 +59,7 @@ const patterns: RegExp[] = [ new RegExp('^api/0/users/[^/]+/identities/$'), new RegExp('^api/0/users/[^/]+/ips/$'), new 
RegExp('^api/0/users/[^/]+/notifications/$'), - new RegExp('^api/0/users/[^/]+/notifications/email/$'), + new RegExp('^api/0/users/[^/]+/notifications/[^/]+/$'), new RegExp('^api/0/users/[^/]+/notification-options/$'), new RegExp('^api/0/users/[^/]+/notification-options/[^/]+/$'), new RegExp('^api/0/users/[^/]+/notification-providers/$'), diff --git a/static/app/utils/profiling/renderers/UIFramesRenderer.tsx b/static/app/utils/profiling/renderers/UIFramesRenderer.tsx new file mode 100644 index 00000000000000..7296664843efa1 --- /dev/null +++ b/static/app/utils/profiling/renderers/UIFramesRenderer.tsx @@ -0,0 +1,37 @@ +import {mat3} from 'gl-matrix'; + +import {FlamegraphTheme} from 'sentry/utils/profiling/flamegraph/flamegraphTheme'; +import {UIFrames} from 'sentry/utils/profiling/uiFrames'; + +export abstract class UIFramesRenderer { + ctx: CanvasRenderingContext2D | WebGLRenderingContext | null = null; + canvas: HTMLCanvasElement | null; + uiFrames: UIFrames; + theme: FlamegraphTheme; + options: { + draw_border: boolean; + }; + + constructor( + canvas: HTMLCanvasElement, + uiFrames: UIFrames, + theme: FlamegraphTheme, + options: {draw_border: boolean} = {draw_border: false} + ) { + this.canvas = canvas; + this.uiFrames = uiFrames; + this.theme = theme; + this.options = options; + } + + getColorForFrame( + type: UIFrames['frames'][0]['type'] + ): [number, number, number, number] { + if (type === 'frozen') { + return this.theme.COLORS.UI_FRAME_COLOR_FROZEN; + } + return this.theme.COLORS.UI_FRAME_COLOR_SLOW; + } + + abstract draw(configViewToPhysicalSpace: mat3): void; +} diff --git a/static/app/utils/profiling/renderers/UIFramesRenderer2D.tsx b/static/app/utils/profiling/renderers/UIFramesRenderer2D.tsx new file mode 100644 index 00000000000000..ceedfc02b8aa62 --- /dev/null +++ b/static/app/utils/profiling/renderers/UIFramesRenderer2D.tsx @@ -0,0 +1,61 @@ +import {mat3} from 'gl-matrix'; + +import {colorComponentsToRGBA} from 'sentry/utils/profiling/colors/utils'; +import {FlamegraphTheme} from 'sentry/utils/profiling/flamegraph/flamegraphTheme'; +import {getContext, resizeCanvasToDisplaySize} from 'sentry/utils/profiling/gl/utils'; +import { + DEFAULT_FLAMEGRAPH_RENDERER_OPTIONS, + FlamegraphRendererOptions, +} from 'sentry/utils/profiling/renderers/flamegraphRenderer'; +import {UIFramesRenderer} from 'sentry/utils/profiling/renderers/UIFramesRenderer'; +import {Rect} from 'sentry/utils/profiling/speedscope'; +import {UIFrames} from 'sentry/utils/profiling/uiFrames'; + +export class UIFramesRenderer2D extends UIFramesRenderer { + ctx: CanvasRenderingContext2D | null = null; + + constructor( + canvas: HTMLCanvasElement, + uiFrames: UIFrames, + theme: FlamegraphTheme, + options: FlamegraphRendererOptions = DEFAULT_FLAMEGRAPH_RENDERER_OPTIONS + ) { + super(canvas, uiFrames, theme, options); + this.initCanvasContext(); + } + + initCanvasContext(): void { + if (!this.canvas) { + throw new Error('Cannot initialize context from null canvas'); + } + // Setup webgl canvas context + this.ctx = getContext(this.canvas, '2d'); + + if (!this.ctx) { + throw new Error('Could not get canvas 2d context'); + } + resizeCanvasToDisplaySize(this.canvas); + } + + draw(configViewToPhysicalSpace: mat3): void { + if (!this.canvas) { + throw new Error('No canvas to draw on'); + } + + if (!this.ctx) { + throw new Error('No canvas context to draw with'); + } + + const border = window.devicePixelRatio; + this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height); + + for (const frame of this.uiFrames.frames) { + 
const rect = new Rect(frame.start, 0, frame.end - frame.start, 1).transformRect( + configViewToPhysicalSpace + ); + + this.ctx.fillStyle = colorComponentsToRGBA(this.getColorForFrame(frame.type)); + this.ctx.fillRect(rect.x + border, rect.y, rect.width - border, rect.height); + } + } +} diff --git a/static/app/utils/profiling/renderers/uiFramesRenderer.spec.tsx b/static/app/utils/profiling/renderers/uiFramesRendererWebGL.spec.tsx similarity index 90% rename from static/app/utils/profiling/renderers/uiFramesRenderer.spec.tsx rename to static/app/utils/profiling/renderers/uiFramesRendererWebGL.spec.tsx index 702ba6f72626da..a1eff9eed0d096 100644 --- a/static/app/utils/profiling/renderers/uiFramesRenderer.spec.tsx +++ b/static/app/utils/profiling/renderers/uiFramesRendererWebGL.spec.tsx @@ -2,7 +2,7 @@ import {vec2} from 'gl-matrix'; import {makeCanvasMock, makeContextMock} from 'sentry-test/profiling/utils'; -import {UIFramesRenderer} from 'sentry/utils/profiling/renderers/uiFramesRenderer'; +import {UIFramesRendererWebGL} from 'sentry/utils/profiling/renderers/uiFramesRendererWebGL'; import {Rect} from 'sentry/utils/profiling/speedscope'; import {UIFrames} from 'sentry/utils/profiling/uiFrames'; @@ -48,7 +48,7 @@ describe('UIFramesRenderer', () => { {unit: 'nanoseconds'}, new Rect(0, 0, 10, 1) ); - const renderer = new UIFramesRenderer(canvas, uiFrames, LightFlamegraphTheme); + const renderer = new UIFramesRendererWebGL(canvas, uiFrames, LightFlamegraphTheme); it.each([ [vec2.fromValues(-1, 0), null], diff --git a/static/app/utils/profiling/renderers/uiFramesRenderer.tsx b/static/app/utils/profiling/renderers/uiFramesRendererWebGL.tsx similarity index 96% rename from static/app/utils/profiling/renderers/uiFramesRenderer.tsx rename to static/app/utils/profiling/renderers/uiFramesRendererWebGL.tsx index 2f7fe72c8b92d3..a85a3d8def2739 100644 --- a/static/app/utils/profiling/renderers/uiFramesRenderer.tsx +++ b/static/app/utils/profiling/renderers/uiFramesRendererWebGL.tsx @@ -2,8 +2,6 @@ import * as Sentry from '@sentry/react'; import {mat3, vec2} from 'gl-matrix'; import {FlamegraphTheme} from 'sentry/utils/profiling/flamegraph/flamegraphTheme'; -import {Rect} from 'sentry/utils/profiling/speedscope'; - import { createAndBindBuffer, createProgram, @@ -15,8 +13,10 @@ import { resizeCanvasToDisplaySize, safeGetContext, upperBound, -} from '../gl/utils'; -import {UIFrameNode, UIFrames} from '../uiFrames'; +} from 'sentry/utils/profiling/gl/utils'; +import {UIFramesRenderer} from 'sentry/utils/profiling/renderers/UIFramesRenderer'; +import {Rect} from 'sentry/utils/profiling/speedscope'; +import {UIFrameNode, UIFrames} from 'sentry/utils/profiling/uiFrames'; import {uiFramesFragment, uiFramesVertext} from './shaders'; @@ -25,15 +25,10 @@ const PHYSICAL_SPACE_PX = new Rect(0, 0, 1, 1); const CONFIG_TO_PHYSICAL_SPACE = mat3.create(); const VERTICES_PER_FRAME = 6; -class UIFramesRenderer { - canvas: HTMLCanvasElement | null; - uiFrames: UIFrames; - +class UIFramesRendererWebGL extends UIFramesRenderer { ctx: WebGLRenderingContext | null = null; program: WebGLProgram | null = null; - theme: FlamegraphTheme; - // Vertex and color buffer positions: Float32Array = new Float32Array(); frame_types: Float32Array = new Float32Array(); @@ -61,20 +56,13 @@ class UIFramesRenderer { u_projection: null, }; - options: { - draw_border: boolean; - }; - constructor( canvas: HTMLCanvasElement, uiFrames: UIFrames, theme: FlamegraphTheme, options: {draw_border: boolean} = {draw_border: false} ) { - this.uiFrames = 
uiFrames; - this.canvas = canvas; - this.theme = theme; - this.options = options; + super(canvas, uiFrames, theme, options); const initialized = this.initCanvasContext(); if (!initialized) { @@ -335,4 +323,4 @@ class UIFramesRenderer { } } -export {UIFramesRenderer}; +export {UIFramesRendererWebGL}; diff --git a/static/app/views/replays/detail/accessibility/accessibilityHeaderCell.tsx b/static/app/views/replays/detail/accessibility/accessibilityHeaderCell.tsx index 6c2a778d9fc647..8988a10de83132 100644 --- a/static/app/views/replays/detail/accessibility/accessibilityHeaderCell.tsx +++ b/static/app/views/replays/detail/accessibility/accessibilityHeaderCell.tsx @@ -27,8 +27,6 @@ const COLUMNS: { label: t('Type'), }, {field: 'element', label: t('Element')}, - - {field: 'timestampMs', label: t('Timestamp')}, ]; export const COLUMN_COUNT = COLUMNS.length; diff --git a/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx b/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx index 27d84270a49dcc..b4f0408d579344 100644 --- a/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx +++ b/static/app/views/replays/detail/accessibility/accessibilityTableCell.tsx @@ -2,7 +2,6 @@ import {ComponentProps, CSSProperties, forwardRef} from 'react'; import classNames from 'classnames'; import { - ButtonWrapper, Cell, CodeHighlightCell, Text, @@ -14,7 +13,6 @@ import {HydratedA11yFrame} from 'sentry/utils/replays/hydrateA11yFrame'; import {Color} from 'sentry/utils/theme'; import useUrlParams from 'sentry/utils/useUrlParams'; import useSortAccessibility from 'sentry/views/replays/detail/accessibility/useSortAccessibility'; -import TimestampButton from 'sentry/views/replays/detail/timestampButton'; const EMPTY_CELL = '--'; @@ -33,7 +31,7 @@ interface Props extends ReturnType { onClickCell: (props: {dataIndex: number; rowIndex: number}) => void; rowIndex: number; sortConfig: ReturnType['sortConfig']; - startTimestampMs: number; + // startTimestampMs: number; style: CSSProperties; } @@ -45,12 +43,10 @@ const AccessibilityTableCell = forwardRef( currentHoverTime, currentTime, onClickCell, - onClickTimestamp, onMouseEnter, onMouseLeave, rowIndex, sortConfig, - startTimestampMs, style, }: Props, ref @@ -127,21 +123,6 @@ const AccessibilityTableCell = forwardRef( ), - () => ( - - - { - event.stopPropagation(); - onClickTimestamp(a11yIssue); - }} - startTimestampMs={startTimestampMs} - timestampMs={a11yIssue.timestampMs} - /> - - - ), ]; return renderFns[columnIndex](); diff --git a/static/app/views/replays/detail/accessibility/details/index.tsx b/static/app/views/replays/detail/accessibility/details/index.tsx index 903527268ac433..0c876d9db65045 100644 --- a/static/app/views/replays/detail/accessibility/details/index.tsx +++ b/static/app/views/replays/detail/accessibility/details/index.tsx @@ -14,7 +14,6 @@ import SplitDivider from 'sentry/views/replays/detail/layout/splitDivider'; type Props = { item: null | HydratedA11yFrame; onClose: () => void; - startTimestampMs: number; } & Omit, 'size'>; function AccessibilityDetails({ @@ -23,7 +22,6 @@ function AccessibilityDetails({ onClose, onDoubleClick, onMouseDown, - startTimestampMs, }: Props) { if (!item) { return null; @@ -52,7 +50,7 @@ function AccessibilityDetails({ - + ); } diff --git a/static/app/views/replays/detail/accessibility/details/sections.tsx b/static/app/views/replays/detail/accessibility/details/sections.tsx index ae3eb53b85c512..47f33309fe4afd 100644 --- 
a/static/app/views/replays/detail/accessibility/details/sections.tsx +++ b/static/app/views/replays/detail/accessibility/details/sections.tsx @@ -1,8 +1,6 @@ -import {MouseEvent} from 'react'; import beautify from 'js-beautify'; import {CodeSnippet} from 'sentry/components/codeSnippet'; -import {useReplayContext} from 'sentry/components/replays/replayContext'; import {t} from 'sentry/locale'; import type {HydratedA11yFrame} from 'sentry/utils/replays/hydrateA11yFrame'; import { @@ -10,11 +8,9 @@ import { KeyValueTuple, SectionItem, } from 'sentry/views/replays/detail/accessibility/details/components'; -import TimestampButton from 'sentry/views/replays/detail/timestampButton'; export type SectionProps = { item: HydratedA11yFrame; - startTimestampMs: number; }; export function ElementSection({item}: SectionProps) { @@ -27,9 +23,7 @@ export function ElementSection({item}: SectionProps) { ); } -export function GeneralSection({item, startTimestampMs}: SectionProps) { - const {setCurrentTime} = useReplayContext(); - +export function GeneralSection({item}: SectionProps) { const data: KeyValueTuple[] = [ { key: t('Impact'), @@ -39,20 +33,6 @@ export function GeneralSection({item, startTimestampMs}: SectionProps) { {key: t('Type'), value: item.id}, {key: t('Help'), value: {item.description}}, {key: t('Path'), value: item.element.target.join(' ')}, - { - key: t('Timestamp'), - value: ( - { - event.stopPropagation(); - setCurrentTime(item.offsetMs); - }} - startTimestampMs={startTimestampMs} - timestampMs={item.timestampMs} - /> - ), - }, ]; return ( diff --git a/static/app/views/replays/detail/accessibility/index.tsx b/static/app/views/replays/detail/accessibility/index.tsx index 765e11927a08b6..1c4cb77ec796a3 100644 --- a/static/app/views/replays/detail/accessibility/index.tsx +++ b/static/app/views/replays/detail/accessibility/index.tsx @@ -36,11 +36,10 @@ const cellMeasurer = { }; function AccessibilityList() { - const {currentTime, currentHoverTime, replay} = useReplayContext(); + const {currentTime, currentHoverTime} = useReplayContext(); const {onMouseEnter, onMouseLeave, onClickTimestamp} = useCrumbHandlers(); const {data: accessibilityData, isLoading} = useA11yData(); - const startTimestampMs = replay?.getReplay()?.started_at?.getTime() || 0; const [scrollToRow, setScrollToRow] = useState(undefined); @@ -149,7 +148,6 @@ function AccessibilityList() { ref={e => e && registerChild?.(e)} rowIndex={rowIndex} sortConfig={sortConfig} - startTimestampMs={startTimestampMs} style={{...style, height: BODY_HEIGHT}} /> ) @@ -227,7 +225,6 @@ function AccessibilityList() { onClose={() => { setDetailRow(''); }} - startTimestampMs={startTimestampMs} /> diff --git a/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py b/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py index 95b24199a19e83..d4c72895de5e64 100644 --- a/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py +++ b/tests/sentry/api/endpoints/test_organization_sentry_app_installations.py @@ -1,3 +1,4 @@ +from sentry.api.fields.sentry_slug import DEFAULT_SLUG_ERROR_MESSAGE from sentry.models.integrations.sentry_app_installation import SentryAppInstallation from sentry.testutils.cases import APITestCase from sentry.testutils.silo import control_silo_test @@ -121,3 +122,8 @@ def test_install_twice(self): assert SentryAppInstallation.objects.filter(sentry_app=app).count() == 1 assert response.status_code == 200 + + def test_invalid_numeric_slug(self): + self.login_as(user=self.user) + 
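# A purely numeric slug should be rejected with the shared default slug error +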
response = self.get_error_response(self.org.slug, slug="1234", status_code=400) + assert response.data["slug"][0] == DEFAULT_SLUG_ERROR_MESSAGE diff --git a/tests/sentry/api/endpoints/test_user_notification_details.py b/tests/sentry/api/endpoints/test_user_notification_details.py index 72d2b807f28c9d..1ce620bf957921 100644 --- a/tests/sentry/api/endpoints/test_user_notification_details.py +++ b/tests/sentry/api/endpoints/test_user_notification_details.py @@ -1,5 +1,10 @@ +from sentry.models.notificationsetting import NotificationSetting +from sentry.models.notificationsettingoption import NotificationSettingOption +from sentry.models.notificationsettingprovider import NotificationSettingProvider +from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes from sentry.testutils.cases import APITestCase from sentry.testutils.silo import control_silo_test +from sentry.types.integrations import ExternalProviders class UserNotificationDetailsTestBase(APITestCase): @@ -30,11 +35,46 @@ def test_returns_correct_defaults(self): In this test we add existing per-project and per-organization Notification settings in order to test that defaults are correct. """ + # default is 3 + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.DEPLOY, + NotificationSettingOptionValues.NEVER, + user_id=self.user.id, + organization=self.organization, + ) + + # default is NotificationSettingOptionValues.COMMITTED_ONLY + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.WORKFLOW, + NotificationSettingOptionValues.ALWAYS, + user_id=self.user.id, + organization=self.organization, + ) response = self.get_success_response("me") + assert response.data.get("deployNotifications") == 3 assert response.data.get("personalActivityNotifications") is False assert response.data.get("selfAssignOnResolve") is False + assert response.data.get("subscribeByDefault") is True + assert response.data.get("workflowNotifications") == 1 + + def test_subscribe_by_default(self): + """ + Test that we expect project-independent issue alert preferences to be + returned as `subscribe_by_default`. 
+ """ + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.ISSUE_ALERTS, + NotificationSettingOptionValues.NEVER, + user_id=self.user.id, + ) + + response = self.get_success_response("me") + assert response.data.get("subscribeByDefault") is False @control_silo_test @@ -45,10 +85,55 @@ def test_saves_and_returns_values(self): org = self.create_organization() self.create_member(user=self.user, organization=org) data = { + "deployNotifications": 2, "personalActivityNotifications": True, "selfAssignOnResolve": True, } self.get_success_response("me", **data) + value = NotificationSetting.objects.get_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.DEPLOY, + user_id=self.user.id, + ) + assert value == NotificationSettingOptionValues.ALWAYS + + query_args = { + "user_id": self.user.id, + "team_id": None, + "value": "always", + "scope_type": "user", + "scope_identifier": self.user.id, + } + assert NotificationSettingOption.objects.filter(**query_args).exists() + assert NotificationSettingProvider.objects.filter(**query_args, provider="email") + + def test_saves_and_returns_values_when_defaults_present(self): + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.DEPLOY, + NotificationSettingOptionValues.NEVER, + user_id=self.user.id, + organization=self.organization, + ) + + response = self.get_success_response("me", **{"deployNotifications": 2}) + assert response.data.get("deployNotifications") == 2 + + value1 = NotificationSetting.objects.get_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.DEPLOY, + user_id=self.user.id, + organization=self.organization, + ) + value2 = NotificationSetting.objects.get_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.DEPLOY, + user_id=self.user.id, + ) + + assert value1 == NotificationSettingOptionValues.NEVER + assert value2 == NotificationSettingOptionValues.ALWAYS + def test_reject_invalid_values(self): - self.get_error_response("me", status_code=400, **{"personalActivityNotifications": 6}) + self.get_error_response("me", status_code=400, **{"deployNotifications": 6}) diff --git a/tests/sentry/api/endpoints/test_user_notification_email.py b/tests/sentry/api/endpoints/test_user_notification_email.py deleted file mode 100644 index a540666b6a5181..00000000000000 --- a/tests/sentry/api/endpoints/test_user_notification_email.py +++ /dev/null @@ -1,77 +0,0 @@ -from sentry.models.options.user_option import UserOption -from sentry.models.useremail import UserEmail -from sentry.testutils.cases import APITestCase -from sentry.testutils.silo import control_silo_test - - -class UserNotificationEmailTestBase(APITestCase): - endpoint = "sentry-api-0-user-notifications-email" - - def setUp(self): - self.organization2 = self.create_organization(name="Another Org", owner=self.user) - self.project2 = self.create_project( - organization=self.organization, teams=[self.team], name="Another Name" - ) - - self.login_as(user=self.user) - - -@control_silo_test(stable=True) -class UserNotificationEmailGetTest(UserNotificationEmailTestBase): - def test_populates_useroptions_for_email(self): - UserEmail.objects.create(user=self.user, email="alias@example.com", is_verified=True).save() - UserEmail.objects.create( - user=self.user, email="alias2@example.com", is_verified=True - ).save() - - UserOption.objects.set_value( - user=self.user, project=self.project, key="mail:email", value="alias@example.com" - ) - UserOption.objects.set_value( - 
user=self.user, project=self.project2, key="mail:email", value="alias2@example.com" - ) - - response = self.get_success_response("me") - - assert response.data == { - str(self.project.id): "alias@example.com", - str(self.project2.id): "alias2@example.com", - } - - -# TODO(hybrid-cloud): Fix underlying logic, which is not silo safe -@control_silo_test() -class UserNotificationEmailTest(UserNotificationEmailTestBase): - method = "put" - - def test_saves_and_returns_email_routing(self): - UserEmail.objects.create(user=self.user, email="alias@example.com", is_verified=True).save() - email = self.user.email - - data = {str(self.project.id): email, str(self.project2.id): "alias@example.com"} - self.get_success_response("me", status_code=204, **data) - - value1 = UserOption.objects.get( - user=self.user, project_id=self.project.id, key="mail:email" - ).value - value2 = UserOption.objects.get( - user=self.user, project_id=self.project2.id, key="mail:email" - ).value - - assert value1 == email - assert value2 == "alias@example.com" - - def test_email_routing_emails_must_be_verified(self): - UserEmail.objects.create( - user=self.user, email="alias@example.com", is_verified=False - ).save() - - data = {str(self.project.id): "alias@example.com"} - self.get_error_response("me", status_code=400, **data) - - def test_email_routing_emails_must_be_valid(self): - new_user = self.create_user(email="b@example.com") - UserEmail.objects.create(user=new_user, email="alias2@example.com", is_verified=True).save() - - data = {str(self.project2.id): "alias2@example.com"} - self.get_error_response("me", status_code=400, **data) diff --git a/tests/sentry/api/endpoints/test_user_notification_fine_tuning.py b/tests/sentry/api/endpoints/test_user_notification_fine_tuning.py new file mode 100644 index 00000000000000..64d07cdd339dc5 --- /dev/null +++ b/tests/sentry/api/endpoints/test_user_notification_fine_tuning.py @@ -0,0 +1,407 @@ +from sentry.models.notificationsetting import NotificationSetting +from sentry.models.notificationsettingoption import NotificationSettingOption +from sentry.models.options.user_option import UserOption +from sentry.models.useremail import UserEmail +from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes +from sentry.testutils.cases import APITestCase +from sentry.testutils.silo import control_silo_test +from sentry.types.integrations import ExternalProviders + + +class UserNotificationFineTuningTestBase(APITestCase): + endpoint = "sentry-api-0-user-notifications-fine-tuning" + + def setUp(self): + self.organization2 = self.create_organization(name="Another Org", owner=self.user) + self.project2 = self.create_project( + organization=self.organization, teams=[self.team], name="Another Name" + ) + + self.login_as(user=self.user) + + def test_invalid_notification_type(self): + """This is run twice because of inheritance.""" + self.get_error_response("me", "invalid", status_code=404) + + +@control_silo_test +class UserNotificationFineTuningGetTest(UserNotificationFineTuningTestBase): + def test_returns_correct_defaults(self): + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.ISSUE_ALERTS, + NotificationSettingOptionValues.ALWAYS, + user_id=self.user.id, + project=self.project, + ) + response = self.get_success_response("me", "alerts") + assert response.data.get(self.project.id) == "1" + + NotificationSetting.objects.update_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.DEPLOY, + 
NotificationSettingOptionValues.ALWAYS, + user_id=self.user.id, + organization=self.organization, + ) + response = self.get_success_response("me", "deploy") + assert response.data.get(self.organization.id) == "2" + + UserOption.objects.create( + user=self.user, + organization_id=None, + key="reports:disabled-organizations", + value=[self.organization.id], + ) + response = self.get_success_response("me", "reports") + assert response.data.get(self.organization.id) == "0" + + +# TODO(hybrid-cloud): Fix underlying logic, which is not silo safe +@control_silo_test() +class UserNotificationFineTuningTest(UserNotificationFineTuningTestBase): + method = "put" + + def test_update_invalid_project(self): + # We do not validate project / organization ids! Due to the silo split we accept these at face value rather than fan out + # across all silos to validate them. + self.get_response("me", "alerts", **{"123": 1}) + + def test_invalid_id_value(self): + self.get_error_response("me", "alerts", status_code=400, **{"nope": 1}) + + def test_saves_and_returns_alerts(self): + data = {str(self.project.id): 1, str(self.project2.id): 0} + self.get_success_response("me", "alerts", status_code=204, **data) + + value1 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.ISSUE_ALERTS, + user_id=self.user.id, + project=self.project, + ) + + value2 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.ISSUE_ALERTS, + user_id=self.user.id, + project=self.project2, + ) + + assert value1 == NotificationSettingOptionValues.ALWAYS + assert value2 == NotificationSettingOptionValues.NEVER + + # Can return to default + data = {str(self.project.id): -1} + self.get_success_response("me", "alerts", status_code=204, **data) + + value1 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.ISSUE_ALERTS, + user_id=self.user.id, + project=self.project, + ) + value2 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.ISSUE_ALERTS, + user_id=self.user.id, + project=self.project2, + ) + + assert value1 == NotificationSettingOptionValues.DEFAULT + assert value2 == NotificationSettingOptionValues.NEVER + + def test_saves_and_returns_workflow(self): + data = {str(self.project.id): 1, str(self.project2.id): 2} + self.get_success_response("me", "workflow", status_code=204, **data) + + value = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project, + ) + value2 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project2, + ) + + assert value == NotificationSettingOptionValues.SUBSCRIBE_ONLY + assert value2 == NotificationSettingOptionValues.NEVER + + # Can return to default + data = {str(self.project.id): -1} + self.get_success_response("me", "workflow", status_code=204, **data) + + value1 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project, + ) + value2 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project2, + ) + + assert value1 == 
NotificationSettingOptionValues.DEFAULT + assert value2 == NotificationSettingOptionValues.NEVER + + def test_double_write(self): + data = {str(self.project.id): 1, str(self.project2.id): 2} + self.get_success_response("me", "workflow", status_code=204, **data) + + value = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project, + ) + value2 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project2, + ) + + assert value == NotificationSettingOptionValues.SUBSCRIBE_ONLY + assert value2 == NotificationSettingOptionValues.NEVER + + query_args = { + "user_id": self.user.id, + "team_id": None, + } + + assert NotificationSettingOption.objects.filter( + **query_args, + scope_type="project", + scope_identifier=self.project.id, + value="subscribe_only", + ).exists() + assert NotificationSettingOption.objects.filter( + **query_args, + scope_type="project", + scope_identifier=self.project2.id, + value="never", + ).exists() + + # Can return to default + data = {str(self.project.id): -1} + self.get_success_response("me", "workflow", status_code=204, **data) + + value1 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project, + ) + value2 = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.WORKFLOW, + user_id=self.user.id, + project=self.project2, + ) + + assert value1 == NotificationSettingOptionValues.DEFAULT + assert value2 == NotificationSettingOptionValues.NEVER + + query_args = { + "user_id": self.user.id, + "team_id": None, + } + + assert NotificationSettingOption.objects.filter( + **query_args, + scope_type="project", + scope_identifier=self.project2.id, + value="never", + ).exists() + + def test_saves_and_returns_email_routing(self): + UserEmail.objects.create(user=self.user, email="alias@example.com", is_verified=True).save() + email = self.user.email + + data = {str(self.project.id): email, str(self.project2.id): "alias@example.com"} + self.get_success_response("me", "email", status_code=204, **data) + + value1 = UserOption.objects.get( + user=self.user, project_id=self.project.id, key="mail:email" + ).value + value2 = UserOption.objects.get( + user=self.user, project_id=self.project2.id, key="mail:email" + ).value + + assert value1 == email + assert value2 == "alias@example.com" + + def test_email_routing_emails_must_be_verified(self): + UserEmail.objects.create( + user=self.user, email="alias@example.com", is_verified=False + ).save() + + data = {str(self.project.id): "alias@example.com"} + self.get_error_response("me", "email", status_code=400, **data) + + def test_email_routing_emails_must_be_valid(self): + new_user = self.create_user(email="b@example.com") + UserEmail.objects.create(user=new_user, email="alias2@example.com", is_verified=True).save() + + data = {str(self.project2.id): "alias2@example.com"} + self.get_error_response("me", "email", status_code=400, **data) + + def test_saves_and_returns_deploy(self): + data = {str(self.organization.id): 4} + self.get_success_response("me", "deploy", status_code=204, **data) + + value = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.DEPLOY, + user_id=self.user.id, + 
organization=self.organization, + ) + assert value == NotificationSettingOptionValues.NEVER + + data = {str(self.organization.id): 2} + self.get_success_response("me", "deploy", status_code=204, **data) + + value = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.DEPLOY, + user_id=self.user.id, + organization=self.organization, + ) + assert value == NotificationSettingOptionValues.ALWAYS + + data = {str(self.organization.id): -1} + self.get_success_response("me", "deploy", status_code=204, **data) + + value = NotificationSetting.objects.get_settings( + provider=ExternalProviders.EMAIL, + type=NotificationSettingTypes.DEPLOY, + user_id=self.user.id, + organization=self.organization, + ) + assert value == NotificationSettingOptionValues.DEFAULT + + def test_saves_and_returns_weekly_reports(self): + data = {str(self.organization.id): 0, str(self.organization2.id): "0"} + self.get_success_response("me", "reports", status_code=204, **data) + + assert set( + UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value + ) == {self.organization.id, self.organization2.id} + + data = {str(self.organization.id): 1} + self.get_success_response("me", "reports", status_code=204, **data) + assert set( + UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value + ) == {self.organization2.id} + + data = {str(self.organization.id): 0} + self.get_success_response("me", "reports", status_code=204, **data) + assert set( + UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value + ) == {self.organization.id, self.organization2.id} + + def test_enable_weekly_reports_from_default_setting(self): + data = {str(self.organization.id): 1, str(self.organization2.id): "1"} + self.get_success_response("me", "reports", status_code=204, **data) + + assert ( + set(UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value) + == set() + ) + + # can disable + data = {str(self.organization.id): 0} + self.get_success_response("me", "reports", status_code=204, **data) + assert set( + UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value + ) == {self.organization.id} + + # re-enable + data = {str(self.organization.id): 1} + self.get_success_response("me", "reports", status_code=204, **data) + assert ( + set(UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value) + == set() + ) + + def test_double_write_weekly_report(self): + data = {str(self.organization.id): 1, str(self.organization2.id): "1"} + self.get_success_response("me", "reports", status_code=204, **data) + + assert ( + set(UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value) + == set() + ) + assert NotificationSettingOption.objects.filter( + user_id=self.user.id, + scope_type="organization", + scope_identifier=self.organization.id, + value="always", + type="reports", + ).exists() + assert NotificationSettingOption.objects.filter( + user_id=self.user.id, + scope_type="organization", + scope_identifier=self.organization2.id, + value="always", + type="reports", + ).exists() + + # can disable + data = {str(self.organization.id): 0} + self.get_success_response("me", "reports", status_code=204, **data) + assert set( + UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value + ) == {self.organization.id} + assert NotificationSettingOption.objects.filter( + user_id=self.user.id, + scope_type="organization", + 
scope_identifier=self.organization.id, + value="never", + type="reports", + ).exists() + + # re-enable + data = {str(self.organization.id): 1} + self.get_success_response("me", "reports", status_code=204, **data) + assert ( + set(UserOption.objects.get(user=self.user, key="reports:disabled-organizations").value) + == set() + ) + assert NotificationSettingOption.objects.filter( + user_id=self.user.id, + scope_type="organization", + scope_identifier=self.organization.id, + value="always", + type="reports", + ).exists() + + def test_permissions(self): + new_user = self.create_user(email="b@example.com") + new_org = self.create_organization(name="New Org") + new_team = self.create_team(name="New Team", organization=new_org, members=[new_user]) + new_project = self.create_project( + organization=new_org, teams=[new_team], name="New Project" + ) + + data = {str(new_org.id): 0} + self.get_error_response("me", "reports", status_code=403, **data) + + assert not UserOption.objects.filter( + user=self.user, organization_id=new_org.id, key="reports" + ).exists() + + value = NotificationSetting.objects.get_settings( + ExternalProviders.EMAIL, + NotificationSettingTypes.ISSUE_ALERTS, + user_id=self.user.id, + project=new_project, + ) + assert value == NotificationSettingOptionValues.DEFAULT diff --git a/tests/sentry/integrations/slack/notifications/test_assigned.py b/tests/sentry/integrations/slack/notifications/test_assigned.py index 02634711045ca3..3a012d9863ecd9 100644 --- a/tests/sentry/integrations/slack/notifications/test_assigned.py +++ b/tests/sentry/integrations/slack/notifications/test_assigned.py @@ -9,7 +9,7 @@ from sentry.notifications.notifications.activity.assigned import AssignedActivityNotification from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType from sentry.types.integrations import ExternalProviders @@ -30,7 +30,8 @@ def create_notification(self, group, notification): ) @responses.activate - def test_multiple_identities(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_multiple_identities(self, mock_func): """ Test that we notify a user with multiple Identities in each place """ @@ -76,7 +77,8 @@ def test_multiple_identities(self): assert channel == identity2.external_id @responses.activate - def test_multiple_orgs(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_multiple_orgs(self, mock_func): """ Test that if a user is in 2 orgs with Slack and has an Identity linked in each, we're only going to notify them for the relevant org @@ -119,7 +121,8 @@ def test_multiple_orgs(self): assert channel == self.identity.external_id @responses.activate - def test_assignment(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_assignment(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue is assigned """ @@ -140,7 +143,8 @@ def test_assignment(self): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_assignment_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", 
side_effect=send_notification) + def test_assignment_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a generic issue type is assigned """ @@ -163,7 +167,8 @@ def test_assignment_generic_issue(self, occurrence): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_assignment_performance_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_assignment_performance_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a performance issue is assigned """ diff --git a/tests/sentry/integrations/slack/notifications/test_deploy.py b/tests/sentry/integrations/slack/notifications/test_deploy.py index 096400790d4834..afee18620bd229 100644 --- a/tests/sentry/integrations/slack/notifications/test_deploy.py +++ b/tests/sentry/integrations/slack/notifications/test_deploy.py @@ -1,3 +1,5 @@ +from unittest import mock + import responses from django.utils import timezone @@ -6,7 +8,7 @@ from sentry.models.release import Release from sentry.notifications.notifications.activity.release import ReleaseActivityNotification from sentry.testutils.cases import SlackActivityNotificationTest -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.silo import region_silo_test from sentry.types.activity import ActivityType @@ -14,7 +16,8 @@ @region_silo_test class SlackDeployNotificationTest(SlackActivityNotificationTest): @responses.activate - def test_deploy(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_deploy(self, mock_func): """ Test that a Slack message is sent with the expected payload when a deploy happens. 
""" diff --git a/tests/sentry/integrations/slack/notifications/test_escalating.py b/tests/sentry/integrations/slack/notifications/test_escalating.py index 62c02c47ce24e4..1a69590e3cc6c5 100644 --- a/tests/sentry/integrations/slack/notifications/test_escalating.py +++ b/tests/sentry/integrations/slack/notifications/test_escalating.py @@ -6,7 +6,7 @@ from sentry.notifications.notifications.activity.escalating import EscalatingActivityNotification from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType @@ -26,7 +26,8 @@ def create_notification(self, group): ) @responses.activate - def test_escalating(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_escalating(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue escalates """ @@ -52,7 +53,8 @@ def test_escalating(self): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_escalating_performance_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_escalating_performance_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a performance issue escalates """ @@ -83,7 +85,8 @@ def test_escalating_performance_issue(self, occurrence): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_escalating_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_escalating_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a generic issue type escalates """ diff --git a/tests/sentry/integrations/slack/notifications/test_issue_alert.py b/tests/sentry/integrations/slack/notifications/test_issue_alert.py index 5628b8eab84a73..3604c2f4e6b2b5 100644 --- a/tests/sentry/integrations/slack/notifications/test_issue_alert.py +++ b/tests/sentry/integrations/slack/notifications/test_issue_alert.py @@ -26,7 +26,7 @@ from sentry.tasks.digests import deliver_digest from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.integrations import ExternalProviders @@ -54,7 +54,8 @@ def setUp(self): ) @responses.activate - def test_issue_alert_user(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_user(self, mock_func): """Test that issue alerts are sent to a Slack user.""" event = self.store_event( @@ -90,7 +91,8 @@ def test_issue_alert_user(self): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_performance_issue_alert_user(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", 
side_effect=send_notification) + def test_performance_issue_alert_user(self, mock_func, occurrence): """Test that performance issue alerts are sent to a Slack user.""" event = self.create_performance_issue() @@ -112,7 +114,8 @@ def test_performance_issue_alert_user(self, occurrence): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_generic_issue_alert_user(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_generic_issue_alert_user(self, mock_func, occurrence): """Test that generic issue alerts are sent to a Slack user.""" event = self.store_event( data={"message": "Hellboy's world", "level": "error"}, project_id=self.project.id @@ -131,7 +134,8 @@ def test_generic_issue_alert_user(self, occurrence): ) @responses.activate - def test_disabled_org_integration_for_user(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_disabled_org_integration_for_user(self, mock_func): with assume_test_silo_mode(SiloMode.CONTROL): OrganizationIntegration.objects.get(integration=self.integration).update( status=ObjectStatus.DISABLED @@ -151,7 +155,8 @@ def test_disabled_org_integration_for_user(self): assert len(responses.calls) == 0 @responses.activate - def test_issue_alert_issue_owners(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_issue_owners(self, mock_func): """Test that issue alerts are sent to issue owners in Slack.""" event = self.store_event( @@ -193,7 +198,8 @@ def test_issue_alert_issue_owners(self): ) @responses.activate - def test_issue_alert_issue_owners_environment(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_issue_owners_environment(self, mock_func): """Test that issue alerts are sent to issue owners in Slack with the environment in the query params when the alert rule filters by environment.""" environment = self.create_environment(self.project, name="production") @@ -243,7 +249,8 @@ def test_issue_alert_issue_owners_environment(self): ) @responses.activate - def test_issue_alert_team_issue_owners(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_team_issue_owners(self, mock_func): """Test that issue alerts are sent to a team in Slack via an Issue Owners rule action.""" # add a second user to the team so we can be sure it's only @@ -334,7 +341,8 @@ def test_issue_alert_team_issue_owners(self): ) @responses.activate - def test_disabled_org_integration_for_team(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_disabled_org_integration_for_team(self, mock_func): # update the team's notification settings ExternalActor.objects.create( team_id=self.team.id, @@ -389,8 +397,9 @@ def test_disabled_org_integration_for_team(self): assert len(responses.calls) == 0 @responses.activate + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) @patch.object(sentry, "digests") - def test_issue_alert_team_issue_owners_user_settings_off_digests(self, digests): + def test_issue_alert_team_issue_owners_user_settings_off_digests(self, digests, mock_func): """Test that issue alerts are sent to a team in Slack via an Issue Owners rule action even when the users' issue alert notification settings are off and digests are triggered.""" @@ -495,7 +504,8 @@ def 
test_issue_alert_team_issue_owners_user_settings_off_digests(self, digests): ) @responses.activate - def test_issue_alert_team(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_team(self, mock_func): """Test that issue alerts are sent to a team in Slack.""" # add a second organization org = self.create_organization(owner=self.user) @@ -577,7 +587,8 @@ def test_issue_alert_team(self): ) @responses.activate - def test_issue_alert_team_new_project(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_team_new_project(self, mock_func): """Test that issue alerts are sent to a team in Slack when the team has added a new project""" # add a second user to the team so we can be sure it's only @@ -657,7 +668,8 @@ def test_issue_alert_team_new_project(self): ) @responses.activate - def test_not_issue_alert_team_removed_project(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_not_issue_alert_team_removed_project(self, mock_func): """Test that issue alerts are not sent to a team in Slack when the team has removed the project the issue belongs to""" # create the team's notification settings @@ -705,7 +717,8 @@ def test_not_issue_alert_team_removed_project(self): assert len(responses.calls) == 0 @responses.activate - def test_issue_alert_team_fallback(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_issue_alert_team_fallback(self, mock_func): """Test that issue alerts are sent to each member of a team in Slack.""" user2 = self.create_user(is_superuser=False) @@ -773,8 +786,9 @@ def test_issue_alert_team_fallback(self): ) @responses.activate + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) @patch.object(sentry, "digests") - def test_digest_enabled(self, digests): + def test_digest_enabled(self, digests, mock_func): """ Test that with digests enabled, but Slack notification settings (and not email settings), we send a Slack notification diff --git a/tests/sentry/integrations/slack/notifications/test_new_processing_issues.py b/tests/sentry/integrations/slack/notifications/test_new_processing_issues.py index f8dc7f4b946c0a..23a4c4482f79c7 100644 --- a/tests/sentry/integrations/slack/notifications/test_new_processing_issues.py +++ b/tests/sentry/integrations/slack/notifications/test_new_processing_issues.py @@ -1,3 +1,5 @@ +from unittest import mock + import responses from sentry.models.activity import Activity @@ -6,14 +8,15 @@ ) from sentry.testutils.cases import SlackActivityNotificationTest from sentry.testutils.helpers.features import with_feature -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.types.activity import ActivityType from sentry.web.frontend.debug.debug_new_processing_issues_email import get_issues_data class SlackNewProcessingIssuesNotificationTest(SlackActivityNotificationTest): @responses.activate - def test_new_processing_issue(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_new_processing_issue(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue is held back in reprocessing """ @@ -48,8 +51,9 @@ def test_new_processing_issue(self): ) @responses.activate + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) 
@with_feature("organizations:customer-domains") - def test_new_processing_issue_customer_domains(self): + def test_new_processing_issue_customer_domains(self, mock_func): notification = NewProcessingIssuesActivityNotification( Activity( project=self.project, diff --git a/tests/sentry/integrations/slack/notifications/test_note.py b/tests/sentry/integrations/slack/notifications/test_note.py index ffa749be9a3502..11980244644536 100644 --- a/tests/sentry/integrations/slack/notifications/test_note.py +++ b/tests/sentry/integrations/slack/notifications/test_note.py @@ -6,7 +6,7 @@ from sentry.notifications.notifications.activity.note import NoteActivityNotification from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType @@ -26,7 +26,8 @@ def create_notification(self, group): ) @responses.activate - def test_note(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_note(self, mock_func): """ Test that a Slack message is sent with the expected payload when a comment is made on an issue """ @@ -50,7 +51,8 @@ def test_note(self): ) @responses.activate - def test_note_performance_issue(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_note_performance_issue(self, mock_func): """ Test that a Slack message is sent with the expected payload when a comment is made on a performance issue """ @@ -80,7 +82,8 @@ def test_note_performance_issue(self): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_note_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_note_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a comment is made on a generic issue type """ diff --git a/tests/sentry/integrations/slack/notifications/test_nudge.py b/tests/sentry/integrations/slack/notifications/test_nudge.py index a28fd04ff5bd71..f47ea81e13d379 100644 --- a/tests/sentry/integrations/slack/notifications/test_nudge.py +++ b/tests/sentry/integrations/slack/notifications/test_nudge.py @@ -1,3 +1,4 @@ +from unittest import mock from urllib.parse import parse_qs import responses @@ -7,7 +8,7 @@ IntegrationNudgeNotification, ) from sentry.testutils.cases import SlackActivityNotificationTest -from sentry.testutils.helpers.slack import get_attachment_no_text +from sentry.testutils.helpers.slack import get_attachment_no_text, send_notification from sentry.types.integrations import ExternalProviders from sentry.utils import json @@ -16,7 +17,8 @@ class SlackNudgeNotificationTest(SlackActivityNotificationTest): @responses.activate - def test_nudge(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_nudge(self, mock_func): notification = IntegrationNudgeNotification( self.organization, recipient=self.user, diff --git a/tests/sentry/integrations/slack/notifications/test_regression.py b/tests/sentry/integrations/slack/notifications/test_regression.py index 22bd62e34b1fa1..0850482fdde8c8 100644 --- a/tests/sentry/integrations/slack/notifications/test_regression.py +++ 
b/tests/sentry/integrations/slack/notifications/test_regression.py @@ -6,7 +6,7 @@ from sentry.notifications.notifications.activity.regression import RegressionActivityNotification from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType @@ -26,7 +26,8 @@ def create_notification(self, group, data=None): ) @responses.activate - def test_regression(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_regression(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue regresses """ @@ -44,7 +45,8 @@ def test_regression(self): ) @responses.activate - def test_regression_with_release(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_regression_with_release(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue regresses """ @@ -71,7 +73,8 @@ def test_regression_with_release(self): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_regression_performance_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_regression_performance_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a performance issue regresses """ @@ -91,7 +94,8 @@ def test_regression_performance_issue(self, occurrence): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_regression_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_regression_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a generic issue type regresses """ diff --git a/tests/sentry/integrations/slack/notifications/test_resolved.py b/tests/sentry/integrations/slack/notifications/test_resolved.py index 6de1600834a06c..3dceb33e354ef8 100644 --- a/tests/sentry/integrations/slack/notifications/test_resolved.py +++ b/tests/sentry/integrations/slack/notifications/test_resolved.py @@ -6,7 +6,7 @@ from sentry.notifications.notifications.activity.resolved import ResolvedActivityNotification from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType @@ -26,7 +26,8 @@ def create_notification(self, group): ) @responses.activate - def test_resolved(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue is resolved """ @@ -50,7 +51,8 @@ def test_resolved(self): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_resolved_performance_issue(self, occurrence): + 
@mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved_performance_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a performance issue is resolved """ @@ -74,7 +76,8 @@ def test_resolved_performance_issue(self, occurrence): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_resolved_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a generic issue type is resolved """ diff --git a/tests/sentry/integrations/slack/notifications/test_resolved_in_release.py b/tests/sentry/integrations/slack/notifications/test_resolved_in_release.py index 9d84cdfb879b79..579784550ddfda 100644 --- a/tests/sentry/integrations/slack/notifications/test_resolved_in_release.py +++ b/tests/sentry/integrations/slack/notifications/test_resolved_in_release.py @@ -8,7 +8,7 @@ ) from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.silo import region_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType @@ -32,7 +32,8 @@ def create_notification(self, group, version="meow"): ) @responses.activate - def test_resolved_in_release(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved_in_release(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue is resolved in a release """ @@ -55,7 +56,8 @@ def test_resolved_in_release(self): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_resolved_in_release_performance_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved_in_release_performance_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a performance issue is resolved in a release """ @@ -77,7 +79,8 @@ def test_resolved_in_release_performance_issue(self, occurrence): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_resolved_in_release_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved_in_release_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a generic issue type is resolved in a release """ @@ -97,7 +100,8 @@ def test_resolved_in_release_generic_issue(self, occurrence): ) @responses.activate - def test_resolved_in_release_parsed_version(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_resolved_in_release_parsed_version(self, mock_func): """ Test that the release version is formatted to the short version """ diff --git a/tests/sentry/integrations/slack/notifications/test_unassigned.py b/tests/sentry/integrations/slack/notifications/test_unassigned.py index 8fcb35762ce103..0e7a1ea8c3738d 100644 --- a/tests/sentry/integrations/slack/notifications/test_unassigned.py +++ 
b/tests/sentry/integrations/slack/notifications/test_unassigned.py @@ -6,7 +6,7 @@ from sentry.notifications.notifications.activity.unassigned import UnassignedActivityNotification from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest from sentry.testutils.helpers.notifications import TEST_ISSUE_OCCURRENCE, TEST_PERF_ISSUE_OCCURRENCE -from sentry.testutils.helpers.slack import get_attachment +from sentry.testutils.helpers.slack import get_attachment, send_notification from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType @@ -26,7 +26,8 @@ def create_notification(self, group): ) @responses.activate - def test_unassignment(self): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_unassignment(self, mock_func): """ Test that a Slack message is sent with the expected payload when an issue is unassigned """ @@ -48,7 +49,8 @@ def test_unassignment(self): return_value=TEST_PERF_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_unassignment_performance_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_unassignment_performance_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a performance issue is unassigned """ @@ -68,7 +70,8 @@ def test_unassignment_performance_issue(self, occurrence): return_value=TEST_ISSUE_OCCURRENCE, new_callable=mock.PropertyMock, ) - def test_unassignment_generic_issue(self, occurrence): + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) + def test_unassignment_generic_issue(self, mock_func, occurrence): """ Test that a Slack message is sent with the expected payload when a generic issue type is unassigned """ diff --git a/tests/sentry/notifications/notifications/test_digests.py b/tests/sentry/notifications/notifications/test_digests.py index 678596997264bd..11b9ab969c463e 100644 --- a/tests/sentry/notifications/notifications/test_digests.py +++ b/tests/sentry/notifications/notifications/test_digests.py @@ -17,6 +17,7 @@ from sentry.tasks.digests import deliver_digest from sentry.testutils.cases import PerformanceIssueTestCase, SlackActivityNotificationTest, TestCase from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.testutils.helpers.slack import send_notification from sentry.testutils.skips import requires_snuba from sentry.utils import json from tests.sentry.issues.test_utils import OccurrenceTestMixin @@ -161,8 +162,9 @@ def test_sends_alert_rule_notification_to_each_member(self): class DigestSlackNotification(SlackActivityNotificationTest): @responses.activate + @mock.patch("sentry.notifications.notify.notify", side_effect=send_notification) @mock.patch.object(sentry, "digests") - def test_slack_digest_notification(self, digests): + def test_slack_digest_notification(self, digests, mock_func): """ Test that with digests enabled, but Slack notification settings (and not email settings), we send a Slack notification diff --git a/tests/sentry/tasks/integrations/github/test_open_pr_comment.py b/tests/sentry/tasks/integrations/github/test_open_pr_comment.py index cefd28a22b5c37..a3d16bdfe31081 100644 --- a/tests/sentry/tasks/integrations/github/test_open_pr_comment.py +++ b/tests/sentry/tasks/integrations/github/test_open_pr_comment.py @@ -1,7 +1,12 @@ from unittest.mock import patch +import pytest import responses +from sentry.models.group import Group 
+from sentry.models.options.organization_option import OrganizationOption +from sentry.models.pullrequest import CommentType, PullRequest, PullRequestComment +from sentry.shared_integrations.exceptions.base import ApiError from sentry.tasks.integrations.github.open_pr_comment import ( format_issue_table, format_open_pr_comment, @@ -9,6 +14,7 @@ get_pr_filenames, get_projects_and_filenames_from_source_file, get_top_5_issues_by_count_for_file, + open_pr_comment_workflow, safe_for_comment, ) from sentry.tasks.integrations.github.pr_comment import PullRequestIssue @@ -451,3 +457,241 @@ def test_comment_format(self): Did you find this useful? React with a 👍 or 👎 or let us know in #proj-github-pr-comments""" ) + + +@region_silo_test(stable=True) +class TestOpenPRCommentWorkflow(GithubCommentTestCase): + def setUp(self): + super().setUp() + self.user_id = "user_1" + self.app_id = "app_1" + self.pr = self.create_pr_issues() + self.groups = [ + { + "group_id": g.id, + "event_count": 1000 * (i + 1), + "affected_users": 1000 * (i + 1), + "is_handled": True, + } + for i, g in enumerate(Group.objects.all()) + ] + self.groups.reverse() + self.group_ids = [g["group_id"] for g in self.groups] + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch( + "sentry.tasks.integrations.github.open_pr_comment.get_projects_and_filenames_from_source_file" + ) + @patch("sentry.tasks.integrations.github.open_pr_comment.get_top_5_issues_by_count_for_file") + @patch("sentry.tasks.integrations.github.open_pr_comment.safe_for_comment", return_value=True) + @patch("sentry.tasks.integrations.github.pr_comment.metrics") + @responses.activate + def test_comment_workflow( + self, + mock_metrics, + mock_safe_for_comment, + mock_issues, + mock_reverse_codemappings, + mock_pr_filenames, + ): + # two filenames, the second one has a toggle table + mock_pr_filenames.return_value = ["foo.py", "bar.py"] + mock_reverse_codemappings.return_value = ([self.project], ["foo.py"]) + + mock_issues.return_value = self.groups + + responses.add( + responses.POST, + self.base_url + "/repos/getsentry/sentry/issues/1/comments", + json={"id": 1}, + headers={"X-Ratelimit-Limit": "60", "X-Ratelimit-Remaining": "59"}, + ) + + open_pr_comment_workflow(self.pr.id) + + assert ( + responses.calls[0].request.body + == f'{{"body": "## \\ud83d\\ude80 Sentry Issue Report\\nYou modified these files in this pull request and we noticed these issues associated with them.\\n\\n\\ud83d\\udcc4 **foo.py**\\n\\n| Issue | Additional Info |\\n| :--------- | :-------- |\\n| \\u203c\\ufe0f [**issue 2**](http://testserver/organizations/foobar/issues/{self.group_ids[0]}/?referrer=github-open-pr-bot) issue2 | `Handled:` **True** `Event Count:` **2k** `Users:` **2k** |\\n| \\u203c\\ufe0f [**issue 1**](http://testserver/organizations/foo/issues/{self.group_ids[1]}/?referrer=github-open-pr-bot) issue1 | `Handled:` **True** `Event Count:` **1k** `Users:` **1k** |\\n
\\n\\ud83d\\udcc4 bar.py (Click to Expand)\\n\\n| Issue | Additional Info |\\n| :--------- | :-------- |\\n| \\u203c\\ufe0f [**issue 2**](http://testserver/organizations/foobar/issues/{self.group_ids[0]}/?referrer=github-open-pr-bot) issue2 | `Handled:` **True** `Event Count:` **2k** `Users:` **2k** |\\n| \\u203c\\ufe0f [**issue 1**](http://testserver/organizations/foo/issues/{self.group_ids[1]}/?referrer=github-open-pr-bot) issue1 | `Handled:` **True** `Event Count:` **1k** `Users:` **1k** |\\n
\\n---\\n\\nDid you find this useful? React with a \\ud83d\\udc4d or \\ud83d\\udc4e or let us know in #proj-github-pr-comments"}}'.encode() + ) + + pull_request_comment_query = PullRequestComment.objects.all() + assert len(pull_request_comment_query) == 1 + assert pull_request_comment_query[0].external_id == 1 + assert pull_request_comment_query[0].comment_type == CommentType.OPEN_PR + mock_metrics.incr.assert_called_with("github_open_pr_comment.comment_created") + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch( + "sentry.tasks.integrations.github.open_pr_comment.get_projects_and_filenames_from_source_file" + ) + @patch("sentry.tasks.integrations.github.open_pr_comment.get_top_5_issues_by_count_for_file") + @patch("sentry.tasks.integrations.github.open_pr_comment.safe_for_comment", return_value=True) + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + @responses.activate + def test_comment_workflow_early_return( + self, + mock_metrics, + mock_safe_for_comment, + mock_issues, + mock_reverse_codemappings, + mock_pr_filenames, + ): + mock_pr_filenames.return_value = ["foo.py"] + # no codemappings + mock_reverse_codemappings.return_value = ([], []) + mock_issues.return_value = [] + + open_pr_comment_workflow(self.pr.id) + + pull_request_comment_query = PullRequestComment.objects.all() + assert len(pull_request_comment_query) == 0 + mock_metrics.incr.assert_called_with("github_open_pr_comment.no_issues") + + # has codemappings but no issues + mock_reverse_codemappings.return_value = ([self.project], ["foo.py"]) + + open_pr_comment_workflow(self.pr.id) + + pull_request_comment_query = PullRequestComment.objects.all() + assert len(pull_request_comment_query) == 0 + mock_metrics.incr.assert_called_with("github_open_pr_comment.no_issues") + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch( + "sentry.tasks.integrations.github.open_pr_comment.get_projects_and_filenames_from_source_file" + ) + @patch("sentry.tasks.integrations.github.open_pr_comment.get_top_5_issues_by_count_for_file") + @patch("sentry.tasks.integrations.github.open_pr_comment.safe_for_comment", return_value=True) + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + @responses.activate + def test_comment_workflow_api_error( + self, + mock_metrics, + mock_safe_for_comment, + mock_issues, + mock_reverse_codemappings, + mock_pr_filenames, + ): + mock_pr_filenames.return_value = ["foo.py"] + mock_reverse_codemappings.return_value = ([self.project], ["foo.py"]) + + mock_issues.return_value = self.groups + + responses.add( + responses.POST, + self.base_url + "/repos/getsentry/sentry/issues/1/comments", + status=400, + json={"id": 1}, + ) + responses.add( + responses.POST, + self.base_url + "/repos/getsentry/sentry/issues/2/comments", + status=400, + json={ + "message": "Unable to create comment because issue is locked.", + "documentation_url": "https://docs.github.com/articles/locking-conversations/", + }, + ) + responses.add( + responses.POST, + self.base_url + "/repos/getsentry/sentry/issues/3/comments", + status=400, + json={ + "message": "API rate limit exceeded", + "documentation_url": "https://docs.github.com/rest/overview/resources-in-the-rest-api#rate-limiting", + }, + ) + + with pytest.raises(ApiError): + open_pr_comment_workflow(self.pr.id) + mock_metrics.incr.assert_called_with("github_open_pr_comment.api_error") + + pr_2 = self.create_pr_issues() + + # does not raise ApiError for locked issue + 
open_pr_comment_workflow(pr_2.id) + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "issue_locked_error"} + ) + + pr_3 = self.create_pr_issues() + + # does not raise ApiError for rate limited error + open_pr_comment_workflow(pr_3.id) + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "rate_limited_error"} + ) + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + def test_comment_workflow_missing_pr(self, mock_metrics, mock_pr_filenames): + PullRequest.objects.all().delete() + + open_pr_comment_workflow(0) + + assert not mock_pr_filenames.called + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "missing_pr"} + ) + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + def test_comment_workflow_missing_org(self, mock_metrics, mock_pr_filenames): + self.pr.organization_id = 0 + self.pr.save() + + open_pr_comment_workflow(self.pr.id) + + assert not mock_pr_filenames.called + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "missing_org"} + ) + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + def test_comment_workflow_missing_org_option(self, mock_pr_filenames): + OrganizationOption.objects.set_value( + organization=self.organization, key="sentry:github_open_pr_bot", value=False + ) + open_pr_comment_workflow(self.pr.id) + + assert not mock_pr_filenames.called + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + def test_comment_workflow_missing_repo(self, mock_metrics, mock_pr_filenames): + self.pr.repository_id = 0 + self.pr.save() + + open_pr_comment_workflow(self.pr.id) + + assert not mock_pr_filenames.called + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "missing_repo"} + ) + + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + def test_comment_workflow_missing_integration(self, mock_metrics, mock_pr_filenames): + # invalid integration id + self.gh_repo.integration_id = 0 + self.gh_repo.save() + + open_pr_comment_workflow(self.pr.id) + + assert not mock_pr_filenames.called + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "missing_integration"} + ) + + @patch("sentry.tasks.integrations.github.open_pr_comment.safe_for_comment", return_value=False) + @patch("sentry.tasks.integrations.github.open_pr_comment.get_pr_filenames") + @patch("sentry.tasks.integrations.github.open_pr_comment.metrics") + def test_comment_workflow_not_safe_for_comment( + self, mock_metrics, mock_pr_filenames, mock_safe_for_comment + ): + open_pr_comment_workflow(self.pr.id) + + assert not mock_pr_filenames.called + mock_metrics.incr.assert_called_with( + "github_open_pr_comment.error", tags={"type": "unsafe_for_comment"} + ) diff --git a/tests/sentry/tasks/integrations/github/test_pr_comment.py b/tests/sentry/tasks/integrations/github/test_pr_comment.py index 2287acd7ea8534..aef60d3db9e9fc 100644 --- a/tests/sentry/tasks/integrations/github/test_pr_comment.py +++ b/tests/sentry/tasks/integrations/github/test_pr_comment.py @@ -427,49 +427,18 @@ def test_comment_workflow_api_error(self, 
mock_metrics, mock_issues): status=400, json={"id": 1}, ) - - with pytest.raises(ApiError): - github_comment_workflow(self.pr.id, self.project.id) - assert cache.get(self.cache_key) is None - mock_metrics.incr.assert_called_with("github_pr_comment.api_error") - - @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") - @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @responses.activate - def test_comment_workflow_api_error_locked_issue(self, mock_metrics, mock_issues): - cache.set(self.cache_key, True, timedelta(minutes=5).total_seconds()) - mock_issues.return_value = [ - {"group_id": g.id, "event_count": 10} for g in Group.objects.all() - ] - responses.add( responses.POST, - self.base_url + "/repos/getsentry/sentry/issues/1/comments", + self.base_url + "/repos/getsentry/sentry/issues/2/comments", status=400, json={ "message": "Unable to create comment because issue is locked.", "documentation_url": "https://docs.github.com/articles/locking-conversations/", }, ) - - github_comment_workflow(self.pr.id, self.project.id) - assert cache.get(self.cache_key) is None - mock_metrics.incr.assert_called_with( - "github_pr_comment.error", tags={"type": "issue_locked_error"} - ) - - @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") - @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @responses.activate - def test_comment_workflow_api_error_rate_limited(self, mock_metrics, mock_issues): - cache.set(self.cache_key, True, timedelta(minutes=5).total_seconds()) - mock_issues.return_value = [ - {"group_id": g.id, "event_count": 10} for g in Group.objects.all() - ] - responses.add( responses.POST, - self.base_url + "/repos/getsentry/sentry/issues/1/comments", + self.base_url + "/repos/getsentry/sentry/issues/3/comments", status=400, json={ "message": "API rate limit exceeded", @@ -477,8 +446,29 @@ def test_comment_workflow_api_error_rate_limited(self, mock_metrics, mock_issues }, ) - github_comment_workflow(self.pr.id, self.project.id) - assert cache.get(self.cache_key) is None + with pytest.raises(ApiError): + github_comment_workflow(self.pr.id, self.project.id) + assert cache.get(self.cache_key) is None + mock_metrics.incr.assert_called_with("github_pr_comment.api_error") + + pr_2 = self.create_pr_issues() + cache_key = DEBOUNCE_PR_COMMENT_CACHE_KEY(pr_2.id) + cache.set(cache_key, True, timedelta(minutes=5).total_seconds()) + + # does not raise ApiError for locked issue + github_comment_workflow(pr_2.id, self.project.id) + assert cache.get(cache_key) is None + mock_metrics.incr.assert_called_with( + "github_pr_comment.error", tags={"type": "issue_locked_error"} + ) + + pr_3 = self.create_pr_issues() + cache_key = DEBOUNCE_PR_COMMENT_CACHE_KEY(pr_3.id) + cache.set(cache_key, True, timedelta(minutes=5).total_seconds()) + + # does not raise ApiError for rate limited error + github_comment_workflow(pr_3.id, self.project.id) + assert cache.get(cache_key) is None mock_metrics.incr.assert_called_with( "github_pr_comment.error", tags={"type": "rate_limited_error"} ) @@ -724,7 +714,6 @@ def test_comment_reactions_task_api_error_one(self, mock_metrics): @patch("sentry.tasks.integrations.github.pr_comment.metrics") @responses.activate def test_comment_reactions_task_api_error_rate_limited(self, mock_metrics): - responses.add( responses.GET, self.base_url + "/repos/getsentry/sentry/issues/comments/2", @@ -746,7 +735,6 @@ def test_comment_reactions_task_api_error_rate_limited(self, mock_metrics): 
@patch("sentry.tasks.integrations.github.pr_comment.metrics") @responses.activate def test_comment_reactions_task_api_error_404(self, mock_metrics): - responses.add( responses.GET, self.base_url + "/repos/getsentry/sentry/issues/comments/2", diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index 60121ed421f274..045d45fb9e0ce8 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -2587,6 +2587,110 @@ def test_device_class_filter(self): assert data[0]["device.class"] == level assert meta["fields"]["device.class"] == "string" + def test_performance_score(self): + self.store_transaction_metric( + 0.03, + metric="measurements.score.lcp", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 0.30, + metric="measurements.score.weight.lcp", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 0.35, + metric="measurements.score.fcp", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 0.70, + metric="measurements.score.weight.fcp", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 0.38, + metric="measurements.score.total", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + + self.store_transaction_metric( + 1.00, + metric="measurements.score.lcp", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 1.00, + metric="measurements.score.weight.lcp", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 0.00, + metric="measurements.score.fid", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + # These fid and ttfb scenarios shouldn't really be happening, but we can test them anyways + self.store_transaction_metric( + 0.00, + metric="measurements.score.weight.fid", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 1.00, + metric="measurements.score.ttfb", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 0.00, + metric="measurements.score.weight.ttfb", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + self.store_transaction_metric( + 1.00, + metric="measurements.score.total", + tags={"transaction": "foo_transaction"}, + timestamp=self.min_ago, + ) + + response = self.do_request( + { + "field": [ + "transaction", + "performance_score(measurements.score.lcp)", + "performance_score(measurements.score.fcp)", + "performance_score(measurements.score.fid)", + "performance_score(measurements.score.ttfb)", + ], + "query": "event.type:transaction", + "dataset": "metrics", + "per_page": 50, + } + ) + assert response.status_code == 200, response.content + assert len(response.data["data"]) == 1 + data = response.data["data"] + meta = response.data["meta"] + field_meta = meta["fields"] + + assert data[0]["performance_score(measurements.score.lcp)"] == 0.7923076923076923 + assert data[0]["performance_score(measurements.score.fcp)"] == 0.5 + assert data[0]["performance_score(measurements.score.fid)"] == 0 + assert data[0]["performance_score(measurements.score.ttfb)"] is None + + assert meta["isMetricsData"] + assert 
field_meta["performance_score(measurements.score.lcp)"] == "integer" + class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithOnDemandMetrics( MetricsEnhancedPerformanceTestCase @@ -2677,3 +2781,7 @@ def test_device_class(self): @pytest.mark.xfail(reason="Not implemented") def test_device_class_filter(self): super().test_device_class_filter() + + @pytest.mark.xfail(reason="Not implemented") + def test_performance_score(self): + super().test_performance_score() diff --git a/yarn.lock b/yarn.lock index 88dec8e8b45ee3..15c34eb546760a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11291,10 +11291,10 @@ typescript-template-language-service-decorator@^2.3.2: resolved "https://registry.yarnpkg.com/typescript-template-language-service-decorator/-/typescript-template-language-service-decorator-2.3.2.tgz#095c1b5ea88c839d9b202fad3ec8c87c1b062953" integrity sha512-hN0zNkr5luPCeXTlXKxsfBPlkAzx86ZRM1vPdL7DbEqqWoeXSxplACy98NpKpLmXsdq7iePUzAXloCAoPKBV6A== -typescript@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" - integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== +typescript@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.2.tgz#00d1c7c1c46928c5845c1ee8d0cc2791031d4c43" + integrity sha512-6l+RyNy7oAHDfxC4FzSJcz9vnjTKxrLpDG5M2Vu4SHRVNg6xzqZp6LYSR9zjqQTu8DU/f5xwxUdADOkbrIX2gQ== u2f-api@1.0.10: version "1.0.10"