From c688e71bc6da8613bd226e3d630dfb7ec7ab77a7 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 20 Sep 2024 10:33:56 -0400 Subject: [PATCH 01/44] fix(hybridcloud) Move cache clearing for sentryapps to a task (#77791) Clearing the region cache for large sentryapps is timing out due to query timeouts. Moving the RPC operations into a task will allow the outbox to be processed more efficiently. If the clear cache task is dropped, caches will naturally expire due to TTL. Fixes SENTRY-3DEZ --- src/sentry/receivers/outbox/control.py | 35 ++----------- src/sentry/tasks/sentry_apps.py | 49 ++++++++++++++++++- tests/sentry/receivers/outbox/test_control.py | 9 ++-- 3 files changed, 57 insertions(+), 36 deletions(-) diff --git a/src/sentry/receivers/outbox/control.py b/src/sentry/receivers/outbox/control.py index 891acfc3b0a44..1a5e94d448fdc 100644 --- a/src/sentry/receivers/outbox/control.py +++ b/src/sentry/receivers/outbox/control.py @@ -9,7 +9,6 @@ from __future__ import annotations import logging -from collections import defaultdict from collections.abc import Mapping from typing import Any @@ -17,18 +16,15 @@ from sentry.hybridcloud.outbox.category import OutboxCategory from sentry.hybridcloud.outbox.signals import process_control_outbox -from sentry.hybridcloud.rpc.caching import region_caching_service from sentry.integrations.models.integration import Integration from sentry.issues.services.issue import issue_service from sentry.models.apiapplication import ApiApplication from sentry.models.files.utils import get_relocation_storage -from sentry.models.organizationmapping import OrganizationMapping from sentry.organizations.services.organization import RpcOrganizationSignal, organization_service from sentry.receivers.outbox import maybe_process_tombstone from sentry.relocation.services.relocation_export.service import region_relocation_export_service from sentry.sentry_apps.models.sentry_app import SentryApp -from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation -from sentry.sentry_apps.services.app.service import get_by_application_id, get_installation +from sentry.tasks.sentry_apps import clear_region_cache logger = logging.getLogger(__name__) @@ -54,32 +50,9 @@ def process_sentry_app_updates(object_identifier: int, region_name: str, **kwds: ) is None: return - # When a sentry app's definition changes purge cache for all the installations. - # This could get slow for large applications, but generally big applications don't change often. - install_query = SentryAppInstallation.objects.filter(sentry_app=sentry_app).values( - "id", "organization_id" - ) - # There isn't a constraint on org : sentryapp so we have to handle lists - install_map: dict[int, list[int]] = defaultdict(list) - for install_row in install_query: - install_map[install_row["organization_id"]].append(install_row["id"]) - - # Clear application_id cache - region_caching_service.clear_key( - key=get_by_application_id.key_from(sentry_app.application_id), region_name=region_name - ) - - # Limit our operations to the region this outbox is for. - # This could be a single query if we use raw_sql. 
- region_query = OrganizationMapping.objects.filter( - organization_id__in=list(install_map.keys()), region_name=region_name - ).values("organization_id") - for region_row in region_query: - installs = install_map[region_row["organization_id"]] - for install_id in installs: - region_caching_service.clear_key( - key=get_installation.key_from(install_id), region_name=region_name - ) + # Spawn a task to clear caches, as there can be 1000+ installations + # for a sentry app. + clear_region_cache.delay(sentry_app_id=sentry_app.id, region_name=region_name) @receiver(process_control_outbox, sender=OutboxCategory.API_APPLICATION_UPDATE) diff --git a/src/sentry/tasks/sentry_apps.py b/src/sentry/tasks/sentry_apps.py index 3b02bddab2916..1d127932ad7ee 100644 --- a/src/sentry/tasks/sentry_apps.py +++ b/src/sentry/tasks/sentry_apps.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +from collections import defaultdict from collections.abc import Mapping from typing import Any @@ -12,14 +13,20 @@ from sentry.api.serializers import AppPlatformEvent, serialize from sentry.constants import SentryAppInstallationStatus from sentry.eventstore.models import Event, GroupEvent +from sentry.hybridcloud.rpc.caching import region_caching_service from sentry.models.activity import Activity from sentry.models.group import Group from sentry.models.organization import Organization +from sentry.models.organizationmapping import OrganizationMapping from sentry.models.project import Project from sentry.models.servicehook import ServiceHook, ServiceHookProject -from sentry.sentry_apps.models.sentry_app import VALID_EVENTS +from sentry.sentry_apps.models.sentry_app import VALID_EVENTS, SentryApp from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation -from sentry.sentry_apps.services.app.service import app_service +from sentry.sentry_apps.services.app.service import ( + app_service, + get_by_application_id, + get_installation, +) from sentry.shared_integrations.exceptions import ApiHostError, ApiTimeoutError, ClientError from sentry.silo.base import SiloMode from sentry.tasks.base import instrumented_task, retry @@ -247,6 +254,44 @@ def installation_webhook(installation_id, user_id, *args, **kwargs): InstallationNotifier.run(install=install, user=user, action="created") +@instrumented_task( + name="sentry.sentry_apps.tasks.installations.clear_region_cache", **CONTROL_TASK_OPTIONS +) +def clear_region_cache(sentry_app_id: int, region_name: str) -> None: + try: + sentry_app = SentryApp.objects.get(id=sentry_app_id) + except SentryApp.DoesNotExist: + return + + # When a sentry app's definition changes purge cache for all the installations. + # This could get slow for large applications, but generally big applications don't change often. + install_query = SentryAppInstallation.objects.filter( + sentry_app=sentry_app, + ).values("id", "organization_id") + + # There isn't a constraint on org : sentryapp so we have to handle lists + install_map: dict[int, list[int]] = defaultdict(list) + for install_row in install_query: + install_map[install_row["organization_id"]].append(install_row["id"]) + + # Clear application_id cache + region_caching_service.clear_key( + key=get_by_application_id.key_from(sentry_app.application_id), region_name=region_name + ) + + # Limit our operations to the region this outbox is for. + # This could be a single query if we use raw_sql. 
+ region_query = OrganizationMapping.objects.filter( + organization_id__in=list(install_map.keys()), region_name=region_name + ).values("organization_id") + for region_row in region_query: + installs = install_map[region_row["organization_id"]] + for install_id in installs: + region_caching_service.clear_key( + key=get_installation.key_from(install_id), region_name=region_name + ) + + @instrumented_task(name="sentry.tasks.sentry_apps.workflow_notification", **TASK_OPTIONS) @retry_decorator def workflow_notification(installation_id, issue_id, type, user_id, *args, **kwargs): diff --git a/tests/sentry/receivers/outbox/test_control.py b/tests/sentry/receivers/outbox/test_control.py index a4ba6862d7ab8..0c653d100b270 100644 --- a/tests/sentry/receivers/outbox/test_control.py +++ b/tests/sentry/receivers/outbox/test_control.py @@ -19,7 +19,7 @@ class ProcessControlOutboxTest(TestCase): identifier = 1 @patch("sentry.receivers.outbox.control.maybe_process_tombstone") - def test_process_integration_updatess(self, mock_maybe_process): + def test_process_integration_updates(self, mock_maybe_process): process_integration_updates( object_identifier=self.identifier, region_name=_TEST_REGION.name ) @@ -36,7 +36,7 @@ def test_process_api_application_updates(self, mock_maybe_process): ApiApplication, self.identifier, region_name=_TEST_REGION.name ) - @patch("sentry.receivers.outbox.control.region_caching_service") + @patch("sentry.tasks.sentry_apps.region_caching_service") def test_process_sentry_app_updates(self, mock_caching): org = self.create_organization() sentry_app = self.create_sentry_app() @@ -48,7 +48,10 @@ def test_process_sentry_app_updates(self, mock_caching): slug=sentry_app.slug, organization=org_two ) - process_sentry_app_updates(object_identifier=sentry_app.id, region_name=_TEST_REGION.name) + with self.tasks(): + process_sentry_app_updates( + object_identifier=sentry_app.id, region_name=_TEST_REGION.name + ) mock_caching.clear_key.assert_any_call( key=f"app_service.get_installation:{install.id}", region_name=_TEST_REGION.name ) From 3629685bfeb5404937b385ce0bbd767ff25bd1ea Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 20 Sep 2024 10:34:29 -0400 Subject: [PATCH 02/44] chore(deletions) Move deletions code to mypy strong list (#77721) Add missing type information to get deletions into the strong typing list. I've also removed the `dependencies` functionality from `DeletionTaskManager` as it was unused. 
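The core of the typing work is making the deletion task classes generic over the model they delete. A simplified sketch of that pattern (illustrative only, with stand-in classes rather than code from this patch):

    from typing import Generic, TypeVar

    class Model:
        """Stand-in for sentry.db.models.base.Model."""
        id: int

    ModelT = TypeVar("ModelT", bound=Model)

    class BaseDeletionTask(Generic[ModelT]):
        def get_child_relations(self, instance: ModelT) -> list:
            # Subclasses bind ModelT to a concrete model, so mypy can check
            # that each override accepts the model it is registered for.
            return []

    class Repository(Model):
        pass

    class RepositoryDeletionTask(BaseDeletionTask[Repository]):
        def get_child_relations(self, instance: Repository) -> list:
            return []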
Refs #77479 --- pyproject.toml | 3 +- src/sentry/deletions/base.py | 113 +++++++++++------- .../deletions/defaults/alert_rule_trigger.py | 7 +- .../defaults/alert_rule_trigger_action.py | 7 +- src/sentry/deletions/defaults/alertrule.py | 7 +- .../deletions/defaults/apiapplication.py | 13 +- src/sentry/deletions/defaults/apigrant.py | 11 +- src/sentry/deletions/defaults/apitoken.py | 9 +- .../deletions/defaults/artifactbundle.py | 7 +- src/sentry/deletions/defaults/commit.py | 7 +- src/sentry/deletions/defaults/commitauthor.py | 7 +- .../deletions/defaults/discoversavedquery.py | 7 +- src/sentry/deletions/defaults/group.py | 8 +- src/sentry/deletions/defaults/grouphash.py | 7 +- src/sentry/deletions/defaults/grouphistory.py | 3 +- src/sentry/deletions/defaults/monitor.py | 7 +- .../deletions/defaults/monitor_environment.py | 7 +- src/sentry/deletions/defaults/organization.py | 17 +-- .../defaults/organizationintegration.py | 13 +- .../deletions/defaults/organizationmember.py | 10 +- .../defaults/platform_external_issue.py | 9 +- src/sentry/deletions/defaults/project.py | 14 ++- src/sentry/deletions/defaults/pullrequest.py | 7 +- .../deletions/defaults/querysubscription.py | 5 +- src/sentry/deletions/defaults/release.py | 7 +- src/sentry/deletions/defaults/repository.py | 14 +-- .../defaults/repositoryprojectpathconfig.py | 7 +- src/sentry/deletions/defaults/rule.py | 11 +- .../deletions/defaults/rulefirehistory.py | 7 +- src/sentry/deletions/defaults/sentry_app.py | 11 +- .../defaults/sentry_app_installation.py | 13 +- .../defaults/sentry_app_installation_token.py | 15 ++- src/sentry/deletions/defaults/service_hook.py | 5 +- src/sentry/deletions/defaults/team.py | 13 +- src/sentry/deletions/manager.py | 38 +++--- src/sentry/deletions/tasks/hybrid_cloud.py | 24 ++-- src/sentry/deletions/tasks/scheduled.py | 8 +- src/sentry/models/scheduledeletion.py | 6 +- src/sentry/tasks/repository.py | 4 - 39 files changed, 279 insertions(+), 209 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 08b02505395b6..de1cb687a3a0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -449,8 +449,7 @@ module = [ "sentry.db.models.manager.*", "sentry.db.models.paranoia", "sentry.db.models.utils", - "sentry.deletions", - "sentry.deletions.tasks.groups", + "sentry.deletions.*", "sentry.digests.notifications", "sentry.eventstore.reprocessing.redis", "sentry.eventtypes.error", diff --git a/src/sentry/deletions/base.py b/src/sentry/deletions/base.py index ce82a9530a8e2..856d5ff2f5f49 100644 --- a/src/sentry/deletions/base.py +++ b/src/sentry/deletions/base.py @@ -2,8 +2,11 @@ import logging import re +from collections.abc import Mapping, Sequence +from typing import TYPE_CHECKING, Any, Generic, TypeVar from sentry.constants import ObjectStatus +from sentry.db.models.base import Model from sentry.users.services.user.model import RpcUser from sentry.users.services.user.service import user_service from sentry.utils import metrics @@ -12,7 +15,16 @@ _leaf_re = re.compile(r"^(UserReport|Event|Group)(.+)") -def _delete_children(manager, relations, transaction_id=None, actor_id=None): +if TYPE_CHECKING: + from sentry.deletions.manager import DeletionTaskManager + + +def _delete_children( + manager: DeletionTaskManager, + relations: Sequence[BaseRelation], + transaction_id: str | None = None, + actor_id: int | None = None, +) -> bool: # Ideally this runs through the deletion manager for relation in relations: task = manager.get( @@ -34,17 +46,23 @@ def _delete_children(manager, relations, transaction_id=None, 
actor_id=None): class BaseRelation: - def __init__(self, params, task): + def __init__(self, params: Mapping[str, Any], task: type[BaseDeletionTask[Any]] | None) -> None: self.task = task self.params = params - def __repr__(self): + def __repr__(self) -> str: class_type = type(self) return f"<{class_type.__module__}.{class_type.__name__}: task={self.task} params={self.params}>" class ModelRelation(BaseRelation): - def __init__(self, model, query, task=None, partition_key=None): + def __init__( + self, + model: type[ModelT], + query: Mapping[str, Any], + task: type[BaseDeletionTask[Any]] | None = None, + partition_key: str | None = None, + ) -> None: params = {"model": model, "query": query} if partition_key: @@ -53,13 +71,21 @@ def __init__(self, model, query, task=None, partition_key=None): super().__init__(params=params, task=task) -class BaseDeletionTask: +ModelT = TypeVar("ModelT", bound=Model) + + +class BaseDeletionTask(Generic[ModelT]): logger = logging.getLogger("sentry.deletions.async") DEFAULT_CHUNK_SIZE = 100 def __init__( - self, manager, skip_models=None, transaction_id=None, actor_id=None, chunk_size=None + self, + manager: DeletionTaskManager, + skip_models: list[type[Model]] | None = None, + transaction_id: str | None = None, + actor_id: int | None = None, + chunk_size: int | None = None, ): self.manager = manager self.skip_models = set(skip_models) if skip_models else None @@ -67,7 +93,7 @@ def __init__( self.actor_id = actor_id self.chunk_size = chunk_size if chunk_size is not None else self.DEFAULT_CHUNK_SIZE - def __repr__(self): + def __repr__(self) -> str: return "<{}: skip_models={} transaction_id={} actor_id={}>".format( type(self), self.skip_models, @@ -82,7 +108,7 @@ def chunk(self) -> bool: """ raise NotImplementedError - def should_proceed(self, instance): + def should_proceed(self, instance: ModelT) -> bool: """ Used by root tasks to ensure deletion is ok to proceed. This allows deletes to be undone by API endpoints without @@ -90,32 +116,26 @@ def should_proceed(self, instance): """ return True - def get_child_relations(self, instance): + def get_child_relations(self, instance: ModelT) -> list[BaseRelation]: # TODO(dcramer): it'd be nice if we collected the default relationships return [ # ModelRelation(Model, {'parent_id': instance.id}) ] - def get_child_relations_bulk(self, instance_list): + def get_child_relations_bulk(self, instance_list: Sequence[ModelT]) -> list[BaseRelation]: return [ # ModelRelation(Model, {'parent_id__in': [i.id for id in instance_list]}) ] - def extend_relations(self, child_relations, obj): - return child_relations - - def extend_relations_bulk(self, child_relations, obj_list): - return child_relations - - def filter_relations(self, child_relations): + def filter_relations(self, child_relations: Sequence[BaseRelation]) -> list[BaseRelation]: if not self.skip_models or not child_relations: - return child_relations + return list(child_relations) return list( rel for rel in child_relations if rel.params.get("model") not in self.skip_models ) - def delete_bulk(self, instance_list) -> bool: + def delete_bulk(self, instance_list: Sequence[ModelT]) -> bool: """ Delete a batch of objects bound to this task. 
@@ -125,7 +145,6 @@ def delete_bulk(self, instance_list) -> bool: self.mark_deletion_in_progress(instance_list) child_relations = self.get_child_relations_bulk(instance_list) - child_relations = self.extend_relations_bulk(child_relations, instance_list) child_relations = self.filter_relations(child_relations) if child_relations: has_more = self.delete_children(child_relations) @@ -134,41 +153,50 @@ def delete_bulk(self, instance_list) -> bool: for instance in instance_list: child_relations = self.get_child_relations(instance) - child_relations = self.extend_relations(child_relations, instance) child_relations = self.filter_relations(child_relations) if child_relations: has_more = self.delete_children(child_relations) if has_more: return has_more - return self.delete_instance_bulk(instance_list) + self.delete_instance_bulk(instance_list) - def delete_instance(self, instance): + return False + + def delete_instance(self, instance: ModelT) -> None: raise NotImplementedError - def delete_instance_bulk(self, instance_list): + def delete_instance_bulk(self, instance_list: Sequence[ModelT]) -> None: for instance in instance_list: self.delete_instance(instance) - def delete_children(self, relations): + def delete_children(self, relations: list[BaseRelation]) -> bool: return _delete_children(self.manager, relations, self.transaction_id, self.actor_id) - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[ModelT]) -> None: pass -class ModelDeletionTask(BaseDeletionTask): +class ModelDeletionTask(BaseDeletionTask[ModelT]): DEFAULT_QUERY_LIMIT = None manager_name = "objects" - def __init__(self, manager, model, query, query_limit=None, order_by=None, **kwargs): + def __init__( + self, + manager: DeletionTaskManager, + model: type[ModelT], + query: Mapping[str, Any], + query_limit: int | None = None, + order_by: str | None = None, + **kwargs: Any, + ): super().__init__(manager, **kwargs) self.model = model self.query = query self.query_limit = query_limit or self.DEFAULT_QUERY_LIMIT or self.chunk_size self.order_by = order_by - def __repr__(self): + def __repr__(self) -> str: return "<{}: model={} query={} order_by={} transaction_id={} actor_id={}>".format( type(self), self.model, @@ -178,18 +206,6 @@ def __repr__(self): self.actor_id, ) - def extend_relations(self, child_relations, obj): - from sentry.deletions import default_manager - - return child_relations + [rel(obj) for rel in default_manager.dependencies[self.model]] - - def extend_relations_bulk(self, child_relations, obj_list): - from sentry.deletions import default_manager - - return child_relations + [ - rel(obj_list) for rel in default_manager.bulk_dependencies[self.model] - ] - def chunk(self) -> bool: """ Deletes a chunk of this instance's data. Return ``True`` if there is @@ -213,7 +229,7 @@ def chunk(self) -> bool: # We have more work to do as we didn't run out of rows to delete. 
return True - def delete_instance(self, instance): + def delete_instance(self, instance: ModelT) -> None: instance_id = instance.id try: instance.delete() @@ -236,14 +252,14 @@ def get_actor(self) -> RpcUser | None: return user_service.get_user(user_id=self.actor_id) return None - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[ModelT]) -> None: for instance in instance_list: status = getattr(instance, "status", None) if status not in (ObjectStatus.DELETION_IN_PROGRESS, None): instance.update(status=ObjectStatus.DELETION_IN_PROGRESS) -class BulkModelDeletionTask(ModelDeletionTask): +class BulkModelDeletionTask(ModelDeletionTask[ModelT]): """ An efficient mechanism for deleting larger volumes of rows in one pass, but will hard fail if the relations have resident foreign relations. @@ -253,7 +269,14 @@ class BulkModelDeletionTask(ModelDeletionTask): DEFAULT_CHUNK_SIZE = 10000 - def __init__(self, manager, model, query, partition_key=None, **kwargs): + def __init__( + self, + manager: DeletionTaskManager, + model: type[ModelT], + query: Mapping[str, Any], + partition_key: str | None = None, + **kwargs: Any, + ): super().__init__(manager, model, query, **kwargs) self.partition_key = partition_key diff --git a/src/sentry/deletions/defaults/alert_rule_trigger.py b/src/sentry/deletions/defaults/alert_rule_trigger.py index 9aa1469390a79..131ee0078ddae 100644 --- a/src/sentry/deletions/defaults/alert_rule_trigger.py +++ b/src/sentry/deletions/defaults/alert_rule_trigger.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.incidents.models.alert_rule import AlertRuleTrigger -class AlertRuleTriggerDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class AlertRuleTriggerDeletionTask(ModelDeletionTask[AlertRuleTrigger]): + def get_child_relations(self, instance: AlertRuleTrigger) -> list[BaseRelation]: from sentry.incidents.models.alert_rule import AlertRuleTriggerAction return [ diff --git a/src/sentry/deletions/defaults/alert_rule_trigger_action.py b/src/sentry/deletions/defaults/alert_rule_trigger_action.py index 4c050d4ca918e..ed7c6307839d4 100644 --- a/src/sentry/deletions/defaults/alert_rule_trigger_action.py +++ b/src/sentry/deletions/defaults/alert_rule_trigger_action.py @@ -1,10 +1,11 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction -class AlertRuleTriggerActionDeletionTask(ModelDeletionTask): +class AlertRuleTriggerActionDeletionTask(ModelDeletionTask[AlertRuleTriggerAction]): manager_name = "objects_for_deletion" - def get_child_relations(self, instance): + def get_child_relations(self, instance: AlertRuleTriggerAction) -> list[BaseRelation]: from sentry.models.notificationmessage import NotificationMessage return [ diff --git a/src/sentry/deletions/defaults/alertrule.py b/src/sentry/deletions/defaults/alertrule.py index 2911d3fbe620f..715c04e63cbd1 100644 --- a/src/sentry/deletions/defaults/alertrule.py +++ b/src/sentry/deletions/defaults/alertrule.py @@ -1,12 +1,13 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.incidents.models.alert_rule import AlertRule -class AlertRuleDeletionTask(ModelDeletionTask): +class 
AlertRuleDeletionTask(ModelDeletionTask[AlertRule]): # The default manager for alert rules excludes snapshots # which we want to include when deleting an organization. manager_name = "objects_with_snapshots" - def get_child_relations(self, instance): + def get_child_relations(self, instance: AlertRule) -> list[BaseRelation]: from sentry.incidents.models.alert_rule import AlertRuleTrigger return [ diff --git a/src/sentry/deletions/defaults/apiapplication.py b/src/sentry/deletions/defaults/apiapplication.py index f683adc0588ba..5722b79076d8e 100644 --- a/src/sentry/deletions/defaults/apiapplication.py +++ b/src/sentry/deletions/defaults/apiapplication.py @@ -1,16 +1,17 @@ -from sentry.models.apiapplication import ApiApplicationStatus +from collections.abc import Sequence -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.apiapplication import ApiApplication, ApiApplicationStatus -class ApiApplicationDeletionTask(ModelDeletionTask): - def should_proceed(self, instance): +class ApiApplicationDeletionTask(ModelDeletionTask[ApiApplication]): + def should_proceed(self, instance: ApiApplication) -> bool: return instance.status in { ApiApplicationStatus.pending_deletion, ApiApplicationStatus.deletion_in_progress, } - def get_child_relations(self, instance): + def get_child_relations(self, instance: ApiApplication) -> list[BaseRelation]: from sentry.models.apigrant import ApiGrant from sentry.models.apitoken import ApiToken @@ -18,7 +19,7 @@ def get_child_relations(self, instance): model_list = (ApiToken, ApiGrant) return [ModelRelation(m, {"application_id": instance.id}) for m in model_list] - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[ApiApplication]) -> None: from sentry.models.apiapplication import ApiApplicationStatus for instance in instance_list: diff --git a/src/sentry/deletions/defaults/apigrant.py b/src/sentry/deletions/defaults/apigrant.py index 32c48bf29f5cd..ff3ba854debcf 100644 --- a/src/sentry/deletions/defaults/apigrant.py +++ b/src/sentry/deletions/defaults/apigrant.py @@ -1,22 +1,23 @@ +from collections.abc import Sequence + from django.db import router +from sentry.deletions.base import ModelDeletionTask from sentry.models.apigrant import ApiGrant from sentry.silo.safety import unguarded_write -from ..base import ModelDeletionTask - -class ModelApiGrantDeletionTask(ModelDeletionTask): +class ModelApiGrantDeletionTask(ModelDeletionTask[ApiGrant]): """ Normally ApiGrants are deleted in bulk, but for cascades originating from sentry app installation, we wish to use the orm so that set null behavior functions correctly. Do not register this as the default, but instead use it as the task= parameter to a relation. 
""" - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[ApiGrant]) -> None: # no status to track pass - def delete_instance(self, instance): + def delete_instance(self, instance: ApiGrant) -> None: with unguarded_write(router.db_for_write(ApiGrant)): super().delete_instance(instance) diff --git a/src/sentry/deletions/defaults/apitoken.py b/src/sentry/deletions/defaults/apitoken.py index e413ba0d013ba..1f2d520c63715 100644 --- a/src/sentry/deletions/defaults/apitoken.py +++ b/src/sentry/deletions/defaults/apitoken.py @@ -1,13 +1,16 @@ -from ..base import ModelDeletionTask +from collections.abc import Sequence +from sentry.deletions.base import ModelDeletionTask +from sentry.models.apitoken import ApiToken -class ModelApiTokenDeletionTask(ModelDeletionTask): + +class ModelApiTokenDeletionTask(ModelDeletionTask[ApiToken]): """ Normally ApiTokens are deleted in bulk, but for cascades originating from sentry app installation, we wish to use the orm so that set null behavior functions correctly. Do not register this as the default, but instead use it as the task= parameter to a relation. """ - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[ApiToken]) -> None: # no status to track pass diff --git a/src/sentry/deletions/defaults/artifactbundle.py b/src/sentry/deletions/defaults/artifactbundle.py index ff6fd847f36de..d62d5f58df895 100644 --- a/src/sentry/deletions/defaults/artifactbundle.py +++ b/src/sentry/deletions/defaults/artifactbundle.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.artifactbundle import ArtifactBundle -class ArtifactBundleDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class ArtifactBundleDeletionTask(ModelDeletionTask[ArtifactBundle]): + def get_child_relations(self, instance: ArtifactBundle) -> list[BaseRelation]: from sentry.models.artifactbundle import ( DebugIdArtifactBundle, ProjectArtifactBundle, diff --git a/src/sentry/deletions/defaults/commit.py b/src/sentry/deletions/defaults/commit.py index 14793f9978765..d64086f95dd6f 100644 --- a/src/sentry/deletions/defaults/commit.py +++ b/src/sentry/deletions/defaults/commit.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.commit import Commit -class CommitDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class CommitDeletionTask(ModelDeletionTask[Commit]): + def get_child_relations(self, instance: Commit) -> list[BaseRelation]: from sentry.models.commitfilechange import CommitFileChange from sentry.models.releasecommit import ReleaseCommit from sentry.models.releaseheadcommit import ReleaseHeadCommit diff --git a/src/sentry/deletions/defaults/commitauthor.py b/src/sentry/deletions/defaults/commitauthor.py index 92345025b3d2f..3d2e1935d9321 100644 --- a/src/sentry/deletions/defaults/commitauthor.py +++ b/src/sentry/deletions/defaults/commitauthor.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.commitauthor import CommitAuthor -class CommitAuthorDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class 
CommitAuthorDeletionTask(ModelDeletionTask[CommitAuthor]): + def get_child_relations(self, instance: CommitAuthor) -> list[BaseRelation]: from sentry.models.commit import Commit return [ diff --git a/src/sentry/deletions/defaults/discoversavedquery.py b/src/sentry/deletions/defaults/discoversavedquery.py index bfe9fa39dd639..44314f99232fa 100644 --- a/src/sentry/deletions/defaults/discoversavedquery.py +++ b/src/sentry/deletions/defaults/discoversavedquery.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.discover.models import DiscoverSavedQuery -class DiscoverSavedQueryDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class DiscoverSavedQueryDeletionTask(ModelDeletionTask[DiscoverSavedQuery]): + def get_child_relations(self, instance: DiscoverSavedQuery) -> list[BaseRelation]: from sentry.discover.models import DiscoverSavedQueryProject return [ diff --git a/src/sentry/deletions/defaults/group.py b/src/sentry/deletions/defaults/group.py index b983d8f0cf5bd..c0da32c0bb6cc 100644 --- a/src/sentry/deletions/defaults/group.py +++ b/src/sentry/deletions/defaults/group.py @@ -48,7 +48,7 @@ ) -class EventDataDeletionTask(BaseDeletionTask): +class EventDataDeletionTask(BaseDeletionTask[Group]): """ Deletes nodestore data, EventAttachment and UserReports for group """ @@ -121,7 +121,7 @@ def chunk(self) -> bool: return True -class GroupDeletionTask(ModelDeletionTask): +class GroupDeletionTask(ModelDeletionTask[Group]): # Delete groups in blocks of 1000. Using 1000 aims to # balance the number of snuba replacements with memory limits. DEFAULT_CHUNK_SIZE = 1000 @@ -152,7 +152,9 @@ def delete_bulk(self, instance_list: Sequence[Group]) -> bool: self.delete_children(child_relations) # Remove group objects with children removed. 
- return self.delete_instance_bulk(instance_list) + self.delete_instance_bulk(instance_list) + + return False def delete_instance(self, instance: Group) -> None: from sentry import similarity diff --git a/src/sentry/deletions/defaults/grouphash.py b/src/sentry/deletions/defaults/grouphash.py index a9769e5f0dc84..8c4d431d85b32 100644 --- a/src/sentry/deletions/defaults/grouphash.py +++ b/src/sentry/deletions/defaults/grouphash.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.grouphash import GroupHash -class GroupHashDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class GroupHashDeletionTask(ModelDeletionTask[GroupHash]): + def get_child_relations(self, instance: GroupHash) -> list[BaseRelation]: from sentry.models.grouphashmetadata import GroupHashMetadata return [ diff --git a/src/sentry/deletions/defaults/grouphistory.py b/src/sentry/deletions/defaults/grouphistory.py index 5612fac3435b8..150e0fd1d7a66 100644 --- a/src/sentry/deletions/defaults/grouphistory.py +++ b/src/sentry/deletions/defaults/grouphistory.py @@ -1,7 +1,8 @@ from sentry.deletions.base import ModelDeletionTask +from sentry.models.grouphistory import GroupHistory -class GroupHistoryDeletionTask(ModelDeletionTask): +class GroupHistoryDeletionTask(ModelDeletionTask[GroupHistory]): """ Specialized deletion handling that operates per group diff --git a/src/sentry/deletions/defaults/monitor.py b/src/sentry/deletions/defaults/monitor.py index 4c477201c0b2e..2af8a5eccd74a 100644 --- a/src/sentry/deletions/defaults/monitor.py +++ b/src/sentry/deletions/defaults/monitor.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.monitors.models import Monitor -class MonitorDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class MonitorDeletionTask(ModelDeletionTask[Monitor]): + def get_child_relations(self, instance: Monitor) -> list[BaseRelation]: from sentry.monitors import models return [ diff --git a/src/sentry/deletions/defaults/monitor_environment.py b/src/sentry/deletions/defaults/monitor_environment.py index d166ab4724993..632550804f1ce 100644 --- a/src/sentry/deletions/defaults/monitor_environment.py +++ b/src/sentry/deletions/defaults/monitor_environment.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.monitors.models import MonitorEnvironment -class MonitorEnvironmentDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class MonitorEnvironmentDeletionTask(ModelDeletionTask[MonitorEnvironment]): + def get_child_relations(self, instance: MonitorEnvironment) -> list[BaseRelation]: from sentry.monitors import models return [ diff --git a/src/sentry/deletions/defaults/organization.py b/src/sentry/deletions/defaults/organization.py index 819b5bcf20de7..5aa3d452456e1 100644 --- a/src/sentry/deletions/defaults/organization.py +++ b/src/sentry/deletions/defaults/organization.py @@ -1,13 +1,14 @@ -from sentry.models.organization import OrganizationStatus +from collections.abc import Sequence + +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.organization import Organization, OrganizationStatus from sentry.organizations.services.organization_actions.impl 
import ( update_organization_with_outbox_message, ) -from ..base import ModelDeletionTask, ModelRelation - -class OrganizationDeletionTask(ModelDeletionTask): - def should_proceed(self, instance): +class OrganizationDeletionTask(ModelDeletionTask[Organization]): + def should_proceed(self, instance: Organization) -> bool: """ Only delete organizations that haven't been undeleted. """ @@ -16,7 +17,7 @@ def should_proceed(self, instance): OrganizationStatus.DELETION_IN_PROGRESS, } - def get_child_relations(self, instance): + def get_child_relations(self, instance: Organization) -> list[BaseRelation]: from sentry.deletions.defaults.discoversavedquery import DiscoverSavedQueryDeletionTask from sentry.discover.models import DiscoverSavedQuery, TeamKeyTransaction from sentry.incidents.models.alert_rule import AlertRule @@ -35,7 +36,7 @@ def get_child_relations(self, instance): from sentry.models.transaction_threshold import ProjectTransactionThreshold # Team must come first - relations = [ModelRelation(Team, {"organization_id": instance.id})] + relations: list[BaseRelation] = [ModelRelation(Team, {"organization_id": instance.id})] model_list = ( OrganizationMember, @@ -65,7 +66,7 @@ def get_child_relations(self, instance): return relations - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[Organization]) -> None: from sentry.models.organization import OrganizationStatus for instance in instance_list: diff --git a/src/sentry/deletions/defaults/organizationintegration.py b/src/sentry/deletions/defaults/organizationintegration.py index 941dcee637857..57befb2fb04e6 100644 --- a/src/sentry/deletions/defaults/organizationintegration.py +++ b/src/sentry/deletions/defaults/organizationintegration.py @@ -1,19 +1,18 @@ from sentry.constants import ObjectStatus +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation from sentry.integrations.models.organization_integration import OrganizationIntegration from sentry.integrations.services.repository import repository_service from sentry.types.region import RegionMappingNotFound -from ..base import ModelDeletionTask, ModelRelation - -class OrganizationIntegrationDeletionTask(ModelDeletionTask): - def should_proceed(self, instance): +class OrganizationIntegrationDeletionTask(ModelDeletionTask[OrganizationIntegration]): + def should_proceed(self, instance: OrganizationIntegration) -> bool: return instance.status in {ObjectStatus.DELETION_IN_PROGRESS, ObjectStatus.PENDING_DELETION} - def get_child_relations(self, instance): + def get_child_relations(self, instance: OrganizationIntegration) -> list[BaseRelation]: from sentry.users.models.identity import Identity - relations = [] + relations: list[BaseRelation] = [] # delete the identity attached through the default_auth_id if instance.default_auth_id: @@ -21,7 +20,7 @@ def get_child_relations(self, instance): return relations - def delete_instance(self, instance: OrganizationIntegration): + def delete_instance(self, instance: OrganizationIntegration) -> None: try: repository_service.disassociate_organization_integration( organization_id=instance.organization_id, diff --git a/src/sentry/deletions/defaults/organizationmember.py b/src/sentry/deletions/defaults/organizationmember.py index df0d7b1b7aec1..3f51fc18166a3 100644 --- a/src/sentry/deletions/defaults/organizationmember.py +++ b/src/sentry/deletions/defaults/organizationmember.py @@ -1,11 +1,11 @@ +from sentry.deletions.base import BaseRelation, ModelDeletionTask, 
ModelRelation from sentry.models.groupsearchview import GroupSearchView +from sentry.models.organizationmember import OrganizationMember -from ..base import ModelDeletionTask, ModelRelation - -class OrganizationMemberDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): - relations = [ +class OrganizationMemberDeletionTask(ModelDeletionTask[OrganizationMember]): + def get_child_relations(self, instance: OrganizationMember) -> list[BaseRelation]: + relations: list[BaseRelation] = [ ModelRelation( GroupSearchView, {"user_id": instance.user_id, "organization_id": instance.organization_id}, diff --git a/src/sentry/deletions/defaults/platform_external_issue.py b/src/sentry/deletions/defaults/platform_external_issue.py index 45c84fa9bf91e..ac8ecc3132829 100644 --- a/src/sentry/deletions/defaults/platform_external_issue.py +++ b/src/sentry/deletions/defaults/platform_external_issue.py @@ -1,7 +1,10 @@ -from ..base import ModelDeletionTask +from collections.abc import Sequence +from sentry.deletions.base import ModelDeletionTask +from sentry.models.platformexternalissue import PlatformExternalIssue -class PlatformExternalIssueDeletionTask(ModelDeletionTask): - def mark_deletion_in_progress(self, instance_list): + +class PlatformExternalIssueDeletionTask(ModelDeletionTask[PlatformExternalIssue]): + def mark_deletion_in_progress(self, instance_list: Sequence[PlatformExternalIssue]) -> None: # No status to track this. pass diff --git a/src/sentry/deletions/defaults/project.py b/src/sentry/deletions/defaults/project.py index 52f4c98076a7d..bd2dbddea5b30 100644 --- a/src/sentry/deletions/defaults/project.py +++ b/src/sentry/deletions/defaults/project.py @@ -1,10 +1,16 @@ from __future__ import annotations -from ..base import BulkModelDeletionTask, ModelDeletionTask, ModelRelation +from sentry.deletions.base import ( + BaseRelation, + BulkModelDeletionTask, + ModelDeletionTask, + ModelRelation, +) +from sentry.models.project import Project -class ProjectDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class ProjectDeletionTask(ModelDeletionTask[Project]): + def get_child_relations(self, instance: Project) -> list[BaseRelation]: from sentry.discover.models import DiscoverSavedQueryProject from sentry.incidents.models.alert_rule import AlertRule, AlertRuleProjects from sentry.incidents.models.incident import IncidentProject @@ -40,7 +46,7 @@ def get_child_relations(self, instance): from sentry.replays.models import ReplayRecordingSegment from sentry.snuba.models import QuerySubscription - relations = [ + relations: list[BaseRelation] = [ # ProjectKey gets revoked immediately, in bulk ModelRelation(ProjectKey, {"project_id": instance.id}) ] diff --git a/src/sentry/deletions/defaults/pullrequest.py b/src/sentry/deletions/defaults/pullrequest.py index 3a0651f2f3287..78bfd6ad2a016 100644 --- a/src/sentry/deletions/defaults/pullrequest.py +++ b/src/sentry/deletions/defaults/pullrequest.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.pullrequest import PullRequest -class PullRequestDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class PullRequestDeletionTask(ModelDeletionTask[PullRequest]): + def get_child_relations(self, instance: PullRequest) -> list[BaseRelation]: from sentry.models.pullrequest import PullRequestComment return [ diff --git a/src/sentry/deletions/defaults/querysubscription.py 
b/src/sentry/deletions/defaults/querysubscription.py index 465bbc315f9e5..9c9641706c406 100644 --- a/src/sentry/deletions/defaults/querysubscription.py +++ b/src/sentry/deletions/defaults/querysubscription.py @@ -1,9 +1,8 @@ +from sentry.deletions.base import ModelDeletionTask from sentry.snuba.models import QuerySubscription -from ..base import ModelDeletionTask - -class QuerySubscriptionDeletionTask(ModelDeletionTask): +class QuerySubscriptionDeletionTask(ModelDeletionTask[QuerySubscription]): def delete_instance(self, instance: QuerySubscription) -> None: from sentry.incidents.models.incident import Incident diff --git a/src/sentry/deletions/defaults/release.py b/src/sentry/deletions/defaults/release.py index 114260468bc54..9499e13a2b3e6 100644 --- a/src/sentry/deletions/defaults/release.py +++ b/src/sentry/deletions/defaults/release.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.release import Release -class ReleaseDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class ReleaseDeletionTask(ModelDeletionTask[Release]): + def get_child_relations(self, instance: Release) -> list[BaseRelation]: from sentry.models.deploy import Deploy from sentry.models.distribution import Distribution from sentry.models.group import Group diff --git a/src/sentry/deletions/defaults/repository.py b/src/sentry/deletions/defaults/repository.py index 812de5a967344..960befb3b93d0 100644 --- a/src/sentry/deletions/defaults/repository.py +++ b/src/sentry/deletions/defaults/repository.py @@ -1,10 +1,10 @@ from sentry.constants import ObjectStatus +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.repository import Repository from sentry.signals import pending_delete -from ..base import ModelDeletionTask, ModelRelation - -def _get_repository_child_relations(instance): +def _get_repository_child_relations(instance: Repository) -> list[BaseRelation]: from sentry.integrations.models.repository_project_path_config import ( RepositoryProjectPathConfig, ) @@ -18,17 +18,17 @@ def _get_repository_child_relations(instance): ] -class RepositoryDeletionTask(ModelDeletionTask): - def should_proceed(self, instance): +class RepositoryDeletionTask(ModelDeletionTask[Repository]): + def should_proceed(self, instance: Repository) -> bool: """ Only delete repositories that haven't been undeleted. """ return instance.status in {ObjectStatus.PENDING_DELETION, ObjectStatus.DELETION_IN_PROGRESS} - def get_child_relations(self, instance): + def get_child_relations(self, instance: Repository) -> list[BaseRelation]: return _get_repository_child_relations(instance) - def delete_instance(self, instance): + def delete_instance(self, instance: Repository) -> None: # TODO child_relations should also send pending_delete so we # don't have to do this here. 
pending_delete.send(sender=type(instance), instance=instance, actor=self.get_actor()) diff --git a/src/sentry/deletions/defaults/repositoryprojectpathconfig.py b/src/sentry/deletions/defaults/repositoryprojectpathconfig.py index 8245808a81285..ccafd905877a3 100644 --- a/src/sentry/deletions/defaults/repositoryprojectpathconfig.py +++ b/src/sentry/deletions/defaults/repositoryprojectpathconfig.py @@ -1,8 +1,9 @@ -from ..base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig -class RepositoryProjectPathConfigDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class RepositoryProjectPathConfigDeletionTask(ModelDeletionTask[RepositoryProjectPathConfig]): + def get_child_relations(self, instance: RepositoryProjectPathConfig) -> list[BaseRelation]: from sentry.models.projectcodeowners import ProjectCodeOwners return [ diff --git a/src/sentry/deletions/defaults/rule.py b/src/sentry/deletions/defaults/rule.py index 2582ede051bbb..31832098961b8 100644 --- a/src/sentry/deletions/defaults/rule.py +++ b/src/sentry/deletions/defaults/rule.py @@ -1,8 +1,11 @@ -from ..base import ModelDeletionTask, ModelRelation +from collections.abc import Sequence +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.rule import Rule -class RuleDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): + +class RuleDeletionTask(ModelDeletionTask[Rule]): + def get_child_relations(self, instance: Rule) -> list[BaseRelation]: from sentry.models.grouprulestatus import GroupRuleStatus from sentry.models.rule import RuleActivity from sentry.models.rulefirehistory import RuleFireHistory @@ -13,7 +16,7 @@ def get_child_relations(self, instance): ModelRelation(RuleActivity, {"rule_id": instance.id}), ] - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[Rule]) -> None: from sentry.constants import ObjectStatus for instance in instance_list: diff --git a/src/sentry/deletions/defaults/rulefirehistory.py b/src/sentry/deletions/defaults/rulefirehistory.py index a2f483ff78a12..67e4f54569cae 100644 --- a/src/sentry/deletions/defaults/rulefirehistory.py +++ b/src/sentry/deletions/defaults/rulefirehistory.py @@ -1,8 +1,9 @@ -from sentry.deletions.base import ModelDeletionTask, ModelRelation +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.rulefirehistory import RuleFireHistory -class RuleFireHistoryDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): +class RuleFireHistoryDeletionTask(ModelDeletionTask[RuleFireHistory]): + def get_child_relations(self, instance: RuleFireHistory) -> list[BaseRelation]: from sentry.models.notificationmessage import NotificationMessage return [ diff --git a/src/sentry/deletions/defaults/sentry_app.py b/src/sentry/deletions/defaults/sentry_app.py index a6549af04a85e..b0e8a618fdb43 100644 --- a/src/sentry/deletions/defaults/sentry_app.py +++ b/src/sentry/deletions/defaults/sentry_app.py @@ -1,8 +1,11 @@ -from ..base import ModelDeletionTask, ModelRelation +from collections.abc import Sequence +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.sentry_apps.models.sentry_app import SentryApp -class SentryAppDeletionTask(ModelDeletionTask): - def get_child_relations(self, 
instance): + +class SentryAppDeletionTask(ModelDeletionTask[SentryApp]): + def get_child_relations(self, instance: SentryApp) -> list[BaseRelation]: from sentry.models.apiapplication import ApiApplication from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation from sentry.users.models.user import User @@ -13,7 +16,7 @@ def get_child_relations(self, instance): ModelRelation(ApiApplication, {"id": instance.application_id}), ] - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[SentryApp]) -> None: from sentry.constants import SentryAppStatus for instance in instance_list: diff --git a/src/sentry/deletions/defaults/sentry_app_installation.py b/src/sentry/deletions/defaults/sentry_app_installation.py index cefeb358b76c6..c064aea55ac0f 100644 --- a/src/sentry/deletions/defaults/sentry_app_installation.py +++ b/src/sentry/deletions/defaults/sentry_app_installation.py @@ -1,9 +1,12 @@ -from ..base import ModelDeletionTask, ModelRelation -from .apigrant import ModelApiGrantDeletionTask +from collections.abc import Sequence +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.deletions.defaults.apigrant import ModelApiGrantDeletionTask +from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation -class SentryAppInstallationDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): + +class SentryAppInstallationDeletionTask(ModelDeletionTask[SentryAppInstallation]): + def get_child_relations(self, instance: SentryAppInstallation) -> list[BaseRelation]: from sentry.models.apigrant import ApiGrant from sentry.models.integrations.sentry_app_installation_for_provider import ( SentryAppInstallationForProvider, @@ -20,5 +23,5 @@ def get_child_relations(self, instance): ), ] - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[SentryAppInstallation]) -> None: pass diff --git a/src/sentry/deletions/defaults/sentry_app_installation_token.py b/src/sentry/deletions/defaults/sentry_app_installation_token.py index 5c0b89d4a5c2d..b4aae9b537d8d 100644 --- a/src/sentry/deletions/defaults/sentry_app_installation_token.py +++ b/src/sentry/deletions/defaults/sentry_app_installation_token.py @@ -1,14 +1,19 @@ -from ..base import ModelDeletionTask, ModelRelation -from .apitoken import ModelApiTokenDeletionTask +from collections.abc import Sequence +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.deletions.defaults.apitoken import ModelApiTokenDeletionTask +from sentry.sentry_apps.models.sentry_app_installation_token import SentryAppInstallationToken -class SentryAppInstallationTokenDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): + +class SentryAppInstallationTokenDeletionTask(ModelDeletionTask[SentryAppInstallationToken]): + def get_child_relations(self, instance: SentryAppInstallationToken) -> list[BaseRelation]: from sentry.models.apitoken import ApiToken return [ ModelRelation(ApiToken, {"id": instance.api_token_id}, task=ModelApiTokenDeletionTask), ] - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress( + self, instance_list: Sequence[SentryAppInstallationToken] + ) -> None: pass diff --git a/src/sentry/deletions/defaults/service_hook.py b/src/sentry/deletions/defaults/service_hook.py index 6c290162f927f..fdbc3de5fc44c 100644 --- a/src/sentry/deletions/defaults/service_hook.py 
+++ b/src/sentry/deletions/defaults/service_hook.py @@ -1,7 +1,8 @@ -from ..base import ModelDeletionTask +from sentry.deletions.base import ModelDeletionTask +from sentry.models.servicehook import ServiceHook -class ServiceHookDeletionTask(ModelDeletionTask): +class ServiceHookDeletionTask(ModelDeletionTask[ServiceHook]): # This subclass just represents an intentional decision to not cascade service hook deletions, and to # mark status using ObjectStatus on deletion. The behavior is identical to the base class # so that intentions are clear. diff --git a/src/sentry/deletions/defaults/team.py b/src/sentry/deletions/defaults/team.py index 5b2c4db201f3f..c0295f99f0b91 100644 --- a/src/sentry/deletions/defaults/team.py +++ b/src/sentry/deletions/defaults/team.py @@ -1,22 +1,25 @@ -from ..base import ModelDeletionTask, ModelRelation +from collections.abc import Sequence +from sentry.deletions.base import BaseRelation, ModelDeletionTask, ModelRelation +from sentry.models.team import Team -class TeamDeletionTask(ModelDeletionTask): - def get_child_relations(self, instance): + +class TeamDeletionTask(ModelDeletionTask[Team]): + def get_child_relations(self, instance: Team) -> list[BaseRelation]: from sentry.models.projectteam import ProjectTeam return [ ModelRelation(ProjectTeam, {"team_id": instance.id}), ] - def mark_deletion_in_progress(self, instance_list): + def mark_deletion_in_progress(self, instance_list: Sequence[Team]) -> None: from sentry.models.team import TeamStatus for instance in instance_list: if instance.status != TeamStatus.DELETION_IN_PROGRESS: instance.update(status=TeamStatus.DELETION_IN_PROGRESS) - def delete_instance(self, instance): + def delete_instance(self, instance: Team) -> None: from sentry.incidents.models.alert_rule import AlertRule from sentry.models.rule import Rule from sentry.monitors.models import Monitor diff --git a/src/sentry/deletions/manager.py b/src/sentry/deletions/manager.py index ce9e89810ead5..7f4e3615fbbc4 100644 --- a/src/sentry/deletions/manager.py +++ b/src/sentry/deletions/manager.py @@ -1,16 +1,18 @@ -from collections import defaultdict +from collections.abc import MutableMapping +from typing import Any + +from sentry.db.models.base import Model +from sentry.deletions.base import BaseDeletionTask __all__ = ["DeletionTaskManager"] class DeletionTaskManager: - def __init__(self, default_task=None): - self.tasks = {} + def __init__(self, default_task: type[BaseDeletionTask[Any]] | None = None) -> None: + self.tasks: MutableMapping[type[Model], type[BaseDeletionTask[Any]]] = {} self.default_task = default_task - self.dependencies = defaultdict(set) - self.bulk_dependencies = defaultdict(set) - def exec_sync(self, instance): + def exec_sync(self, instance: Model) -> None: task = self.get( model=type(instance), query={"id": instance.id}, @@ -18,7 +20,7 @@ def exec_sync(self, instance): while task.chunk(): pass - def exec_sync_many(self, instances): + def exec_sync_many(self, instances: list[Model]) -> None: if not instances: return @@ -29,20 +31,18 @@ def exec_sync_many(self, instances): while task.chunk(): pass - def get(self, task=None, **kwargs): + def get( + self, + task: type[BaseDeletionTask[Any]] | None = None, + **kwargs: Any, + ) -> BaseDeletionTask[Any]: if task is None: model = kwargs.get("model") - try: - task = self.tasks[model] - except KeyError: - task = self.default_task + assert model, "The model parameter is required if `task` is not provided" + task = self.tasks.get(model, self.default_task) + assert task is not None, "Task cannot 
be None" + return task(manager=self, **kwargs) - def register(self, model, task): + def register(self, model: type[Model], task: type[BaseDeletionTask[Any]]) -> None: self.tasks[model] = task - - def add_dependencies(self, model, dependencies): - self.dependencies[model] |= set(dependencies) - - def add_bulk_dependencies(self, model, dependencies): - self.bulk_dependencies[model] |= set(dependencies) diff --git a/src/sentry/deletions/tasks/hybrid_cloud.py b/src/sentry/deletions/tasks/hybrid_cloud.py index 2746167293ac8..59e2eae95956d 100644 --- a/src/sentry/deletions/tasks/hybrid_cloud.py +++ b/src/sentry/deletions/tasks/hybrid_cloud.py @@ -40,11 +40,11 @@ class WatermarkBatch: transaction_id: str -def get_watermark_key(prefix: str, field: HybridCloudForeignKey) -> str: +def get_watermark_key(prefix: str, field: HybridCloudForeignKey[Any, Any]) -> str: return f"{prefix}.{field.model._meta.db_table}.{field.name}" -def get_watermark(prefix: str, field: HybridCloudForeignKey) -> tuple[int, str]: +def get_watermark(prefix: str, field: HybridCloudForeignKey[Any, Any]) -> tuple[int, str]: with redis.clusters.get("default").get_local_client_for_key("deletions.watermark") as client: key = get_watermark_key(prefix, field) v = client.get(key) @@ -59,7 +59,7 @@ def get_watermark(prefix: str, field: HybridCloudForeignKey) -> tuple[int, str]: def set_watermark( - prefix: str, field: HybridCloudForeignKey, value: int, prev_transaction_id: str + prefix: str, field: HybridCloudForeignKey[Any, Any], value: int, prev_transaction_id: str ) -> None: with redis.clusters.get("default").get_local_client_for_key("deletions.watermark") as client: client.set( @@ -78,8 +78,8 @@ def set_watermark( def _chunk_watermark_batch( prefix: str, - field: HybridCloudForeignKey, - manager: BaseManager, + field: HybridCloudForeignKey[Any, Any], + manager: BaseManager[Any], *, batch_size: int, model: type[Model], @@ -116,7 +116,7 @@ def _chunk_watermark_batch( acks_late=True, silo_mode=SiloMode.CONTROL, ) -def schedule_hybrid_cloud_foreign_key_jobs_control(): +def schedule_hybrid_cloud_foreign_key_jobs_control() -> None: if options.get("hybrid_cloud.disable_tombstone_cleanup"): return @@ -131,7 +131,7 @@ def schedule_hybrid_cloud_foreign_key_jobs_control(): acks_late=True, silo_mode=SiloMode.REGION, ) -def schedule_hybrid_cloud_foreign_key_jobs(): +def schedule_hybrid_cloud_foreign_key_jobs() -> None: if options.get("hybrid_cloud.disable_tombstone_cleanup"): return @@ -258,7 +258,7 @@ def get_batch_size() -> int: def _process_tombstone_reconciliation( - field: HybridCloudForeignKey, + field: HybridCloudForeignKey[Any, Any], model: Any, tombstone_cls: type[TombstoneBase], row_after_tombstone: bool, @@ -266,7 +266,7 @@ def _process_tombstone_reconciliation( from sentry import deletions prefix = "tombstone" - watermark_manager: BaseManager = tombstone_cls.objects + watermark_manager: BaseManager[Any] = tombstone_cls.objects if row_after_tombstone: prefix = "row" watermark_manager = field.model.objects @@ -323,7 +323,7 @@ def _process_tombstone_reconciliation( def _get_model_ids_for_tombstone_cascade( tombstone_cls: type[TombstoneBase], model: type[Model], - field: HybridCloudForeignKey, + field: HybridCloudForeignKey[Any, Any], row_after_tombstone: bool, watermark_batch: WatermarkBatch, ) -> tuple[list[int], datetime.datetime]: @@ -399,7 +399,7 @@ def _get_model_ids_for_tombstone_cascade( def get_ids_cross_db_for_row_watermark( tombstone_cls: type[TombstoneBase], model: type[Model], - field: HybridCloudForeignKey, + field: 
HybridCloudForeignKey[Any, Any], row_watermark_batch: WatermarkBatch, ) -> tuple[list[int], datetime.datetime]: @@ -434,7 +434,7 @@ def get_ids_cross_db_for_row_watermark( def get_ids_cross_db_for_tombstone_watermark( tombstone_cls: type[TombstoneBase], model: type[Model], - field: HybridCloudForeignKey, + field: HybridCloudForeignKey[Any, Any], tombstone_watermark_batch: WatermarkBatch, ) -> tuple[list[int], datetime.datetime]: oldest_seen = timezone.now() diff --git a/src/sentry/deletions/tasks/scheduled.py b/src/sentry/deletions/tasks/scheduled.py index 72e615c5fbfc9..e0ae8daa4f6f1 100644 --- a/src/sentry/deletions/tasks/scheduled.py +++ b/src/sentry/deletions/tasks/scheduled.py @@ -31,7 +31,7 @@ acks_late=True, silo_mode=SiloMode.CONTROL, ) -def reattempt_deletions_control(): +def reattempt_deletions_control() -> None: _reattempt_deletions(ScheduledDeletion) @@ -41,7 +41,7 @@ def reattempt_deletions_control(): acks_late=True, silo_mode=SiloMode.REGION, ) -def reattempt_deletions(): +def reattempt_deletions() -> None: _reattempt_deletions(RegionScheduledDeletion) @@ -101,7 +101,7 @@ def _run_scheduled_deletions(model_class: type[BaseScheduledDeletion], process_t silo_mode=SiloMode.CONTROL, ) @retry(exclude=(DeleteAborted,)) -def run_deletion_control(deletion_id, first_pass=True, **kwargs: Any): +def run_deletion_control(deletion_id: int, first_pass: bool = True, **kwargs: Any) -> None: _run_deletion( deletion_id=deletion_id, first_pass=first_pass, @@ -119,7 +119,7 @@ def run_deletion_control(deletion_id, first_pass=True, **kwargs: Any): silo_mode=SiloMode.REGION, ) @retry(exclude=(DeleteAborted,)) -def run_deletion(deletion_id, first_pass=True, **kwargs: Any): +def run_deletion(deletion_id: int, first_pass: bool = True, **kwargs: Any) -> None: _run_deletion( deletion_id=deletion_id, first_pass=first_pass, diff --git a/src/sentry/models/scheduledeletion.py b/src/sentry/models/scheduledeletion.py index 021d2e6551469..60411b4a4e1dc 100644 --- a/src/sentry/models/scheduledeletion.py +++ b/src/sentry/models/scheduledeletion.py @@ -121,9 +121,13 @@ def get_model(self): def get_instance(self): from sentry import deletions + from sentry.deletions.base import ModelDeletionTask model = self.get_model() - query_manager = getattr(model, deletions.get(model=model, query=None).manager_name) + deletion_task = deletions.get(model=model, query=None) + query_manager = model.objects + if isinstance(deletion_task, ModelDeletionTask): + query_manager = getattr(model, deletion_task.manager_name) return query_manager.get(pk=self.object_id) def get_actor(self) -> RpcUser | None: diff --git a/src/sentry/tasks/repository.py b/src/sentry/tasks/repository.py index d49de2ff55371..054756150b0de 100644 --- a/src/sentry/tasks/repository.py +++ b/src/sentry/tasks/repository.py @@ -27,10 +27,6 @@ def repository_cascade_delete_on_hide(repo_id: int) -> None: while has_more: # get child relations child_relations = _get_repository_child_relations(repo) - # extend relations - child_relations = child_relations + [ - rel(repo) for rel in default_manager.dependencies[Repository] - ] # no need to filter relations; delete them if child_relations: has_more = _delete_children(manager=default_manager, relations=child_relations) From cf78c4fd49eb3f78add708ba06b5c4437881a09f Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 20 Sep 2024 10:39:09 -0400 Subject: [PATCH 03/44] chore: Don't send no-op RPC calls from serializers (#77810) When we have empty parameter lists there is no point in making an RPC call as we'll get nothing back. 
This should help reduce the number of RPC call failures that customers can experience and the overall volume as well. --- src/sentry/api/serializers/models/activity.py | 8 +++++--- src/sentry/api/serializers/models/group.py | 9 ++++++--- src/sentry/api/serializers/models/release.py | 17 ++++++++++------- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/src/sentry/api/serializers/models/activity.py b/src/sentry/api/serializers/models/activity.py index 2a429113ca5a7..af60cec3770f8 100644 --- a/src/sentry/api/serializers/models/activity.py +++ b/src/sentry/api/serializers/models/activity.py @@ -17,9 +17,11 @@ def __init__(self, environment_func=None): def get_attrs(self, item_list, user, **kwargs): # TODO(dcramer); assert on relations user_ids = [i.user_id for i in item_list if i.user_id] - user_list = user_service.serialize_many( - filter={"user_ids": user_ids}, as_user=serialize_generic_user(user) - ) + user_list = [] + if user_ids: + user_list = user_service.serialize_many( + filter={"user_ids": user_ids}, as_user=serialize_generic_user(user) + ) users = {u["id"]: u for u in user_list} commit_ids = { diff --git a/src/sentry/api/serializers/models/group.py b/src/sentry/api/serializers/models/group.py index b9bc251d06820..924aafb1d71e5 100644 --- a/src/sentry/api/serializers/models/group.py +++ b/src/sentry/api/serializers/models/group.py @@ -183,9 +183,12 @@ def _serialize_assignees(self, item_list: Sequence[Group]) -> Mapping[int, Team for team in Team.objects.filter(id__in=all_team_ids.keys()): for group_id in all_team_ids[team.id]: result[group_id] = team - for user in user_service.get_many_by_id(ids=list(all_user_ids.keys())): - for group_id in all_user_ids[user.id]: - result[group_id] = user + + user_ids = list(all_user_ids.keys()) + if user_ids: + for user in user_service.get_many_by_id(ids=user_ids): + for group_id in all_user_ids[user.id]: + result[group_id] = user return result diff --git a/src/sentry/api/serializers/models/release.py b/src/sentry/api/serializers/models/release.py index 483b51c6b06f9..8b73b870a314b 100644 --- a/src/sentry/api/serializers/models/release.py +++ b/src/sentry/api/serializers/models/release.py @@ -458,13 +458,16 @@ def get_attrs(self, item_list, user, **kwargs): issue_counts_by_release, ) = self.__get_release_data_with_environments(release_project_envs) - owners = { - d["id"]: d - for d in user_service.serialize_many( - filter={"user_ids": [i.owner_id for i in item_list if i.owner_id]}, - as_user=serialize_generic_user(user), - ) - } + owners = {} + owner_ids = [i.owner_id for i in item_list if i.owner_id] + if owner_ids: + owners = { + d["id"]: d + for d in user_service.serialize_many( + filter={"user_ids": owner_ids}, + as_user=serialize_generic_user(user), + ) + } authors_metadata_attrs = _get_authors_metadata(item_list, user) release_metadata_attrs = _get_last_commit_metadata(item_list, user) From ce2394326ece232b029529bebbebb2f7eb739b54 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Fri, 20 Sep 2024 10:57:25 -0400 Subject: [PATCH 04/44] ref(releases): dont send back release stats on update (#77832) I can't think of a reason why release stats are super necessary on updates to a release. This causes extra load on snuba, and can also result in release update failures when snuba is being rate limited or 429ing. If this is too aggressive, we could consider catching 429's in the release serializer and making them empty instead. 
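For illustration only, a minimal sketch of that fallback idea, which this patch does not implement: it assumes the serializer reaches snuba through some callable and that a 429 surfaces as `RateLimitExceeded` from `sentry.utils.snuba`.

```python
from collections.abc import Callable
from typing import Any

from sentry.utils.snuba import RateLimitExceeded  # assumed exception type for snuba 429s


def stats_or_empty(fetch: Callable[[], dict[str, Any]]) -> dict[str, Any]:
    """Run a snuba-backed stats fetch, degrading to empty stats on rate limits."""
    try:
        return fetch()
    except RateLimitExceeded:
        # Snuba is shedding load; return empty stats rather than failing
        # the whole release update request.
        return {}
```

The option-based approach in the diff below is simpler, since it avoids the snuba round trip entirely on updates.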
--- src/sentry/api/endpoints/organization_release_details.py | 9 +++++++-- src/sentry/api/endpoints/organization_releases.py | 4 +++- src/sentry/api/endpoints/project_release_details.py | 9 +++++++-- src/sentry/options/defaults.py | 7 +++++++ 4 files changed, 24 insertions(+), 5 deletions(-) diff --git a/src/sentry/api/endpoints/organization_release_details.py b/src/sentry/api/endpoints/organization_release_details.py index 91c47608178fd..d0f9986c181e9 100644 --- a/src/sentry/api/endpoints/organization_release_details.py +++ b/src/sentry/api/endpoints/organization_release_details.py @@ -5,7 +5,7 @@ from rest_framework.response import Response from rest_framework.serializers import ListField -from sentry import release_health +from sentry import options, release_health from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import ReleaseAnalyticsMixin, region_silo_endpoint @@ -526,7 +526,12 @@ def put(self, request: Request, organization, version) -> Response: datetime=release.date_released, ) - return Response(serialize(release, request.user)) + no_snuba_for_release_creation = options.get("releases.no_snuba_for_release_creation") + return Response( + serialize( + release, request.user, no_snuba_for_release_creation=no_snuba_for_release_creation + ) + ) @extend_schema( operation_id="Delete an Organization's Release", diff --git a/src/sentry/api/endpoints/organization_releases.py b/src/sentry/api/endpoints/organization_releases.py index a905454461ef2..de668f2730237 100644 --- a/src/sentry/api/endpoints/organization_releases.py +++ b/src/sentry/api/endpoints/organization_releases.py @@ -588,7 +588,9 @@ def post(self, request: Request, organization) -> Response: update_org_auth_token_last_used(request.auth, [project.id for project in projects]) scope.set_tag("success_status", status) - return Response(serialize(release, request.user), status=status) + return Response( + serialize(release, request.user, no_snuba_for_release_creation=True), status=status + ) scope.set_tag("failure_reason", "serializer_error") return Response(serializer.errors, status=400) diff --git a/src/sentry/api/endpoints/project_release_details.py b/src/sentry/api/endpoints/project_release_details.py index e07d96205fdde..be4305d6eb44c 100644 --- a/src/sentry/api/endpoints/project_release_details.py +++ b/src/sentry/api/endpoints/project_release_details.py @@ -2,6 +2,7 @@ from rest_framework.request import Request from rest_framework.response import Response +from sentry import options from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import ReleaseAnalyticsMixin, region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint, ProjectReleasePermission @@ -144,8 +145,12 @@ def put(self, request: Request, project, version) -> Response: data={"version": release.version}, datetime=release.date_released, ) - - return Response(serialize(release, request.user)) + no_snuba_for_release_creation = options.get("releases.no_snuba_for_release_creation") + return Response( + serialize( + release, request.user, no_snuba_for_release_creation=no_snuba_for_release_creation + ) + ) def delete(self, request: Request, project, version) -> Response: """ diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 03ec73e690e70..b884aa35e0dd8 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2729,3 +2729,10 @@ default=[], flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE, ) + 
+register( + "releases.no_snuba_for_release_creation", + type=Bool, + default=False, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) From 083f5c78a58a4ee4f9cfa51526f29924adc14f09 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:33:32 -0400 Subject: [PATCH 05/44] chore(dashboards): Reduce instrumentation for `AutoSizedText` (#77597) Rollout's basically done, this is the last thing. I don't want to create transactions for every resize, that's too much noise. 1. Remove some unnecessary tags (e.g., iteration exceeded) since that condition has mostly disappeared. The iteration count makes the same point 2. Do not create a transaction. In fact, do the opposite: attach a span to an existing transaction if it exists, otherwise just drop it, it's okay 3. Remove custom duration measurements. This has done its job, and it wasn't nearly as relevant for span durations as transaction durations anyway --- .../dashboards/widgetCard/autoSizedText.tsx | 78 +++++++++---------- 1 file changed, 35 insertions(+), 43 deletions(-) diff --git a/static/app/views/dashboards/widgetCard/autoSizedText.tsx b/static/app/views/dashboards/widgetCard/autoSizedText.tsx index 81c28f053f0cf..f6e4a58ca57f5 100644 --- a/static/app/views/dashboards/widgetCard/autoSizedText.tsx +++ b/static/app/views/dashboards/widgetCard/autoSizedText.tsx @@ -51,52 +51,44 @@ export function AutoSizedText({children}: Props) { let iterationCount = 0; - Sentry.withScope(scope => { - const span = Sentry.startInactiveSpan({ - op: 'function', - name: 'AutoSizedText.iterate', - forceTransaction: true, - }); - - const t1 = performance.now(); - - // Run the resize iteration in a loop. This blocks the main UI thread and prevents - // visible layout jitter. If this was done through a `ResizeObserver` or React State - // each step in the resize iteration would be visible to the user - while (iterationCount <= ITERATION_LIMIT) { - const childDimensions = getElementDimensions(childElement); - - const widthDifference = parentDimensions.width - childDimensions.width; - const heightDifference = parentDimensions.height - childDimensions.height; - - const childFitsIntoParent = heightDifference >= 0 && widthDifference >= 0; - const childIsWithinWidthTolerance = - Math.abs(widthDifference) <= MAXIMUM_DIFFERENCE; - const childIsWithinHeightTolerance = - Math.abs(heightDifference) <= MAXIMUM_DIFFERENCE; - - if ( - childFitsIntoParent && - (childIsWithinWidthTolerance || childIsWithinHeightTolerance) - ) { - // Stop the iteration, we've found a fit! - span.setAttribute('widthDifference', widthDifference); - span.setAttribute('heightDifference', heightDifference); - break; - } - - adjustFontSize(childDimensions, parentDimensions); - - iterationCount += 1; + const span = Sentry.startInactiveSpan({ + op: 'function', + name: 'AutoSizedText.iterate', + onlyIfParent: true, + }); + + // Run the resize iteration in a loop. This blocks the main UI thread and prevents + // visible layout jitter. 
If this was done through a `ResizeObserver` or React State + // each step in the resize iteration would be visible to the user + while (iterationCount <= ITERATION_LIMIT) { + const childDimensions = getElementDimensions(childElement); + + const widthDifference = parentDimensions.width - childDimensions.width; + const heightDifference = parentDimensions.height - childDimensions.height; + + const childFitsIntoParent = heightDifference >= 0 && widthDifference >= 0; + const childIsWithinWidthTolerance = + Math.abs(widthDifference) <= MAXIMUM_DIFFERENCE; + const childIsWithinHeightTolerance = + Math.abs(heightDifference) <= MAXIMUM_DIFFERENCE; + + if ( + childFitsIntoParent && + (childIsWithinWidthTolerance || childIsWithinHeightTolerance) + ) { + // Stop the iteration, we've found a fit! + span.setAttribute('widthDifference', widthDifference); + span.setAttribute('heightDifference', heightDifference); + break; } - const t2 = performance.now(); - scope.setTag('didExceedIterationLimit', iterationCount >= ITERATION_LIMIT); + adjustFontSize(childDimensions, parentDimensions); - span.setAttribute('iterationCount', iterationCount); - span.setAttribute('durationFromPerformanceAPI', t2 - t1); - span.end(); - }); + iterationCount += 1; + } + + span.setAttribute('iterationCount', iterationCount); + span.end(); }); observer.observe(parentElement); From fbacb2d72a62a9ca30f6b7ee8011ea23084932f7 Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Fri, 20 Sep 2024 09:48:16 -0700 Subject: [PATCH 06/44] :sparkles: feat(azure_devops): Update Azure DevOps Integration (#77736) # Update: i also updated the IdentityProvider logic so that it uses the new OAuth flow if the organization has the feature flag. the main difference between the existing provider and the new provider is i changed the `/authorize` and `/token` urls, the parameters we pass to each of these endpoints & updated the refresh token params. --- Part of addressing: https://github.com/getsentry/sentry/issues/77570 In this pr, i add logic to migrate existing integrations to the new app. in the integration installation pipeline, we check for a `integration_migrated` key in metadata. If it doesn't exist, we create a new subscription and add it to the integration dict that `build_integration` returns. we will also set the `integration_migration` key to be True. 
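A rough sketch of that metadata check, for illustration only: `maybe_migrate` and the `create_subscription` callable are placeholder names rather than the actual code in this PR, and the `integration_migrated` key spelling is assumed throughout.

```python
from collections.abc import Callable
from typing import Any


def maybe_migrate(
    integration: dict[str, Any], create_subscription: Callable[[], dict[str, Any]]
) -> dict[str, Any]:
    """Illustrative only: mark an existing install as migrated to the new app."""
    metadata = integration.setdefault("metadata", {})
    if not metadata.get("integration_migrated"):
        # First time this install is seen under the new Azure DevOps app:
        # register a webhook subscription against the new app and remember
        # that the migration happened so it only runs once.
        metadata["subscription"] = create_subscription()
        metadata["integration_migrated"] = True
    return integration
```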
In `pipelin.py` - https://github.com/getsentry/sentry/blob/c1ebcc18b4c6a41311f45dee35482553feb0f922/src/sentry/integrations/pipeline.py#L88-L129 the data (the dict that we returned earlier) will then be passed to `ensure_integration`, which ends up saving the new user credentials as well as the migrated key: https://github.com/getsentry/sentry/blob/c1ebcc18b4c6a41311f45dee35482553feb0f922/src/sentry/integrations/pipeline.py#L29-L41 --- fixtures/vsts.py | 23 +++- src/sentry/auth/helper.py | 2 +- src/sentry/features/temporary.py | 2 + src/sentry/identity/__init__.py | 3 +- src/sentry/identity/pipeline.py | 16 ++- src/sentry/identity/vsts/provider.py | 112 ++++++++++++++++++ src/sentry/integrations/vsts/client.py | 21 +++- src/sentry/integrations/vsts/integration.py | 51 +++++++- src/sentry/options/defaults.py | 5 + src/sentry/pipeline/base.py | 4 +- .../integrations/vsts/test_integration.py | 99 +++++++++++++++- 11 files changed, 326 insertions(+), 12 deletions(-) diff --git a/fixtures/vsts.py b/fixtures/vsts.py index 76ad6d83519c3..4c9f4678670f2 100644 --- a/fixtures/vsts.py +++ b/fixtures/vsts.py @@ -66,6 +66,17 @@ def _stub_vsts(self): }, ) + responses.add( + responses.POST, + "https://login.microsoftonline.com/common/oauth2/v2.0/token", + json={ + "access_token": self.access_token, + "token_type": "grant", + "expires_in": 300, # seconds (5 min) + "refresh_token": self.refresh_token, + }, + ) + responses.add( responses.GET, "https://app.vssps.visualstudio.com/_apis/accounts?memberId=%s&api-version=4.1" @@ -195,19 +206,27 @@ def assert_vsts_oauth_redirect(self, redirect): assert redirect.netloc == "app.vssps.visualstudio.com" assert redirect.path == "/oauth2/authorize" + def assert_vsts_new_oauth_redirect(self, redirect): + assert redirect.scheme == "https" + assert redirect.netloc == "login.microsoftonline.com" + assert redirect.path == "/common/oauth2/v2.0/authorize" + def assert_account_selection(self, response, account_id=None): account_id = account_id or self.vsts_account_id assert response.status_code == 200 assert f'