diff --git a/src/sentry/api/endpoints/organization_fork.py b/src/sentry/api/endpoints/organization_fork.py new file mode 100644 index 0000000000000..9cc911156a4bd --- /dev/null +++ b/src/sentry/api/endpoints/organization_fork.py @@ -0,0 +1,158 @@ +import logging +from string import Template + +from django.db import router +from rest_framework import status +from rest_framework.request import Request +from rest_framework.response import Response + +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import Endpoint, region_silo_endpoint +from sentry.api.endpoints.relocations.index import ( + get_autopause_value, + validate_relocation_uniqueness, +) +from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission +from sentry.api.serializers import serialize +from sentry.hybridcloud.services.organization_mapping import organization_mapping_service +from sentry.models.relocation import Relocation +from sentry.organizations.services.organization import organization_service +from sentry.tasks.relocation import uploading_start +from sentry.types.region import get_local_region +from sentry.utils.db import atomic_transaction + +ERR_DUPLICATE_ORGANIZATION_FORK = Template( + "This organization is already in the process of being forked, relocation id: $uuid" +) +ERR_ORGANIZATION_NOT_FOUND = Template("The target organization `$pointer` could not be found.") +ERR_ORGANIZATION_MAPPING_NOT_FOUND = Template( + "The target organization `$slug` has no region mapping." +) +ERR_CANNOT_FORK_INTO_SAME_REGION = Template( + "The organization already lives in region `$region`, so it cannot be forked into that region." 
+) + +logger = logging.getLogger(__name__) + + +@region_silo_endpoint +class OrganizationForkEndpoint(Endpoint): + owner = ApiOwner.OPEN_SOURCE + publish_status = { + "POST": ApiPublishStatus.EXPERIMENTAL, + } + permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,) + + def post(self, request: Request, organization_id_or_slug) -> Response: + """ + Duplicate an organization across regions. The old organization remains untouched. We kick + off the standard `SAAS_TO_SAAS` relocation flow to create a duplicate in this region. + + Because each region silo of the API has its own version of this endpoint, we assume that the + target region for the fork is the owning region of the API on which this was called. For + example, if we call this endpoint at `us.sentry.io`, we are implicitly saying we would like + the target organization forked INTO the `us` region. + + + :pparam string org_slug: the id or slug of the organization + + :auth: required + """ + + logger.info("relocations.fork.post.start", extra={"caller": request.user.id}) + + org_retrieval_args = { + "only_visible": True, + "include_projects": False, + "include_teams": False, + } + org_context = ( + organization_service.get_organization_by_id(id=organization_id_or_slug) + if str(organization_id_or_slug).isdecimal() + else organization_service.get_organization_by_slug( + slug=organization_id_or_slug, **org_retrieval_args + ) + ) + if not org_context: + return Response( + { + "detail": ERR_ORGANIZATION_NOT_FOUND.substitute( + pointer=organization_id_or_slug, + ) + }, + status=status.HTTP_404_NOT_FOUND, + ) + + organization = org_context.organization + org_slug = organization.slug + org_mapping = organization_mapping_service.get(organization_id=organization.id) + if not org_mapping: + return Response( + { + "detail": ERR_ORGANIZATION_MAPPING_NOT_FOUND.substitute( + slug=org_slug, + ) + }, + status=status.HTTP_404_NOT_FOUND, + ) + + # Figure out which region the
organization being forked lives in. + requesting_region_name = get_local_region().name + replying_region_name = org_mapping.region_name + if replying_region_name == requesting_region_name: + return Response( + { + "detail": ERR_CANNOT_FORK_INTO_SAME_REGION.substitute( + region=requesting_region_name, + ) + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # If there is an in-progress relocation into this region with for this org already, block + # this one until that one resolves. + duplicate_relocation = Relocation.objects.filter( + provenance=Relocation.Provenance.SAAS_TO_SAAS.value, + want_org_slugs=[organization.slug], + status__in={Relocation.Status.IN_PROGRESS.value, Relocation.Status.PAUSE.value}, + ).first() + if duplicate_relocation is not None: + return Response( + { + "detail": ERR_DUPLICATE_ORGANIZATION_FORK.substitute( + uuid=duplicate_relocation.uuid + ) + }, + status=status.HTTP_409_CONFLICT, + ) + + # Identify who will be the owner of the newly forked organization, and ensure that they + # don't already have relocations in flight. + owners = organization.get_owners() + owner = owners[0] if len(owners) > 0 else request.user + err = validate_relocation_uniqueness(owner) + if err is not None: + return err + + # We do not create a `RelocationFile` yet. Instead, we trigger a series of RPC calls (via + # `uploading_start`, scheduled below) to create an export of the organization we are seeking + # duplicate from the foreign region. + with atomic_transaction(using=(router.db_for_write(Relocation))): + new_relocation: Relocation = Relocation.objects.create( + creator_id=request.user.id, + owner_id=owner.id, + step=Relocation.Step.UPLOADING.value, + scheduled_pause_at_step=get_autopause_value(), + provenance=Relocation.Provenance.SAAS_TO_SAAS, + want_org_slugs=[organization.slug], + ) + + # Kick off the asynchronous process of exporting the relocation from the partner region. 
+ # When we received this back (via RPC call), we'll be able to continue with the usual + # relocation flow, picking up from the `uploading_complete` task. + uploading_start.apply_async( + args=[new_relocation.uuid, replying_region_name, organization.slug] + ) + + return Response(serialize(new_relocation), status=status.HTTP_201_CREATED) diff --git a/src/sentry/api/endpoints/relocations/index.py b/src/sentry/api/endpoints/relocations/index.py index 70f699699d07e..a36f363a494fc 100644 --- a/src/sentry/api/endpoints/relocations/index.py +++ b/src/sentry/api/endpoints/relocations/index.py @@ -4,6 +4,7 @@ from functools import reduce from string import Template +from django.contrib.auth.models import AnonymousUser from django.db import router from django.db.models import Q from django.utils import timezone @@ -24,7 +25,7 @@ from sentry.auth.elevated_mode import has_elevated_mode from sentry.models.files.file import File from sentry.models.relocation import Relocation, RelocationFile -from sentry.models.user import MAX_USERNAME_LENGTH +from sentry.models.user import MAX_USERNAME_LENGTH, User from sentry.options import get from sentry.search.utils import tokenize_query from sentry.signals import relocation_link_promo_code @@ -128,7 +129,7 @@ def validate_new_relocation_request( return None -def validate_relocation_uniqueness(owner: RpcUser) -> Response | None: +def validate_relocation_uniqueness(owner: RpcUser | AnonymousUser | User) -> Response | None: # Check that this `owner` does not have more than one active `Relocation` in flight. 
if Relocation.objects.filter( owner_id=owner.id, @@ -277,7 +278,7 @@ def post(self, request: Request) -> Response: relocation_link_promo_code.send_robust( relocation_uuid=relocation.uuid, promo_code=promo_code, sender=self.__class__ ) - uploading_start.delay(relocation.uuid, None, None) + uploading_start.apply_async(args=[relocation.uuid, None, None]) try: analytics.record( "relocation.created", diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 518eb2e19d8cb..a969b30bdf79d 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -14,6 +14,7 @@ from sentry.api.endpoints.organization_events_root_cause_analysis import ( OrganizationEventsRootCauseAnalysisEndpoint, ) +from sentry.api.endpoints.organization_fork import OrganizationForkEndpoint from sentry.api.endpoints.organization_integration_migrate_opsgenie import ( OrganizationIntegrationMigrateOpsgenieEndpoint, ) @@ -2168,6 +2169,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationRegionEndpoint.as_view(), name="sentry-api-0-organization-region", ), + # Trigger relocation + re_path( + r"^(?P<organization_id_or_slug>[^\/]+)/fork/$", + OrganizationForkEndpoint.as_view(), + name="sentry-api-0-organization-fork", + ), ] PROJECT_URLS: list[URLPattern | URLResolver] = [ diff --git a/tests/sentry/api/endpoints/relocations/test_index.py b/tests/sentry/api/endpoints/relocations/test_index.py index caa5aea294a45..a6f99b282ff53 100644 --- a/tests/sentry/api/endpoints/relocations/test_index.py +++ b/tests/sentry/api/endpoints/relocations/test_index.py @@ -305,7 +305,7 @@ def tmp_keys(self, tmp_dir: str) -> tuple[Path, Path]: return (tmp_priv_key_path, tmp_pub_key_path) @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_simple( self, uploading_start_mock: Mock, @@ -351,6 +351,7 @@ def test_good_simple( assert
RelocationFile.objects.count() == relocation_file_count + 1 assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( @@ -368,7 +369,7 @@ def test_good_simple( ) @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_promo_code( self, uploading_start_mock: Mock, @@ -415,6 +416,7 @@ def test_good_promo_code( assert RelocationFile.objects.count() == relocation_file_count + 1 assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( @@ -438,7 +440,7 @@ def test_good_promo_code( "relocation.autopause": "IMPORTING", } ) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_with_valid_autopause_option( self, uploading_start_mock: Mock, @@ -469,6 +471,7 @@ def test_good_with_valid_autopause_option( assert response.data["scheduledPauseAtStep"] == Relocation.Step.IMPORTING.name assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( @@ -492,7 +495,7 @@ def test_good_with_valid_autopause_option( "relocation.autopause": "DOESNOTEXIST", } ) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_with_invalid_autopause_option( self, uploading_start_mock: Mock, @@ -523,6 +526,8 @@ def test_good_with_invalid_autopause_option( assert response.data["scheduledPauseAtStep"] is None 
assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) + assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( "relocation.created", @@ -541,7 +546,7 @@ def test_good_with_invalid_autopause_option( @override_options( {"relocation.enabled": False, "relocation.daily-limit.small": 1, "staff.ga-rollout": True} ) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_staff_when_feature_disabled( self, uploading_start_mock: Mock, @@ -585,6 +590,7 @@ def test_good_staff_when_feature_disabled( assert RelocationFile.objects.count() == relocation_file_count + 1 assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( @@ -602,7 +608,7 @@ def test_good_staff_when_feature_disabled( ) @override_options({"relocation.enabled": False, "relocation.daily-limit.small": 1}) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_superuser_when_feature_disabled( self, uploading_start_mock: Mock, @@ -646,6 +652,7 @@ def test_good_superuser_when_feature_disabled( assert RelocationFile.objects.count() == relocation_file_count + 1 assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( @@ -725,7 +732,7 @@ def test_bad_expired_superuser_when_feature_disabled( ]: @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_good_valid_org_slugs( self, 
uploading_start_mock: Mock, @@ -761,6 +768,7 @@ def test_good_valid_org_slugs( assert RelocationFile.objects.count() == relocation_file_count + 1 assert Relocation.objects.get(owner_id=self.owner.id).want_org_slugs == expected assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None]) assert analytics_record_mock.call_count == 1 analytics_record_mock.assert_called_with( @@ -785,7 +793,7 @@ def test_good_valid_org_slugs( ]: @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) - @patch("sentry.tasks.relocation.uploading_start.delay") + @patch("sentry.tasks.relocation.uploading_start.apply_async") def test_bad_invalid_org_slugs( self, analytics_record_mock: Mock, diff --git a/tests/sentry/api/endpoints/test_organization_fork.py b/tests/sentry/api/endpoints/test_organization_fork.py new file mode 100644 index 0000000000000..c958e8e90c6ae --- /dev/null +++ b/tests/sentry/api/endpoints/test_organization_fork.py @@ -0,0 +1,473 @@ +from unittest.mock import Mock, patch +from uuid import UUID + +from sentry.api.endpoints.organization_fork import ( + ERR_CANNOT_FORK_INTO_SAME_REGION, + ERR_DUPLICATE_ORGANIZATION_FORK, + ERR_ORGANIZATION_NOT_FOUND, +) +from sentry.models.organization import OrganizationStatus +from sentry.models.relocation import Relocation, RelocationFile +from sentry.silo.base import SiloMode +from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.options import override_options +from sentry.testutils.silo import assume_test_silo_mode, create_test_regions, region_silo_test + +REQUESTING_TEST_REGION = "requesting" +EXPORTING_TEST_REGION = "exporting" +SAAS_TO_SAAS_TEST_REGIONS = create_test_regions(REQUESTING_TEST_REGION, EXPORTING_TEST_REGION) + + +@region_silo_test(regions=SAAS_TO_SAAS_TEST_REGIONS) +class OrganizationForkTest(APITestCase): + endpoint = "sentry-api-0-organization-fork" + method = "POST" + + def setUp(self): + 
super().setUp() + self.superuser = self.create_user(is_superuser=True) + self.staff_user = self.create_user(is_staff=True) + self.existing_org_owner = self.create_user( + email="existing_org_owner@example.com", + is_superuser=False, + is_staff=False, + is_active=True, + ) + + self.requested_org_slug = "testing" + self.existing_org = self.create_organization( + name=self.requested_org_slug, + owner=self.existing_org_owner, + region=EXPORTING_TEST_REGION, + ) + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_simple( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.slug) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + assert response.data["scheduledPauseAtStep"] is None + assert response.data["creator"]["id"] == str(self.superuser.id) + assert response.data["creator"]["email"] == str(self.superuser.email) + assert response.data["creator"]["username"] == str(self.superuser.username) + assert response.data["owner"]["id"] == str(self.existing_org_owner.id) + assert response.data["owner"]["email"] == str(self.existing_org_owner.email) + assert response.data["owner"]["username"] == str(self.existing_org_owner.username) + + relocation: Relocation = Relocation.objects.get(owner_id=self.existing_org_owner.id) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert Relocation.objects.count() == relocation_count + 1 + assert 
RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_simple_using_organization_id( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.id) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + + relocation: Relocation = Relocation.objects.get(owner_id=self.existing_org_owner.id) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert Relocation.objects.count() == relocation_count + 1 + assert RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @override_options( + { + "relocation.enabled": True, + "relocation.daily-limit.small": 1, + "relocation.autopause": "IMPORTING", + } + ) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_with_valid_autopause_option( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + + response = self.get_success_response(self.existing_org.slug) + + 
assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + assert response.data["scheduledPauseAtStep"] == Relocation.Step.IMPORTING.name + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @override_options( + {"relocation.enabled": False, "relocation.daily-limit.small": 1, "staff.ga-rollout": True} + ) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_staff_when_feature_disabled( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.staff_user, staff=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.slug) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + assert response.data["scheduledPauseAtStep"] is None + assert response.data["creator"]["id"] == str(self.staff_user.id) + assert response.data["creator"]["email"] == str(self.staff_user.email) + assert response.data["creator"]["username"] == str(self.staff_user.username) + assert response.data["owner"]["id"] == str(self.existing_org_owner.id) + assert response.data["owner"]["email"] == str(self.existing_org_owner.email) + assert response.data["owner"]["username"] == str(self.existing_org_owner.username) + + relocation: Relocation = Relocation.objects.get(owner_id=self.existing_org_owner.id) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert 
Relocation.objects.count() == relocation_count + 1 + assert RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @override_options({"relocation.enabled": False, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_superuser_when_feature_disabled( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.slug) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + assert response.data["scheduledPauseAtStep"] is None + assert response.data["creator"]["id"] == str(self.superuser.id) + assert response.data["creator"]["email"] == str(self.superuser.email) + assert response.data["creator"]["username"] == str(self.superuser.username) + assert response.data["owner"]["id"] == str(self.existing_org_owner.id) + assert response.data["owner"]["email"] == str(self.existing_org_owner.email) + assert response.data["owner"]["username"] == str(self.existing_org_owner.username) + + relocation: Relocation = Relocation.objects.get(owner_id=self.existing_org_owner.id) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert Relocation.objects.count() == relocation_count + 1 + assert RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + 
args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_organization_not_found( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = response = self.get_error_response("does-not-exist", status_code=404) + + assert response.data.get("detail") is not None + assert response.data.get("detail") == ERR_ORGANIZATION_NOT_FOUND.substitute( + pointer="does-not-exist" + ) + assert uploading_start_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_cannot_fork_deleted_organization( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + + self.existing_org.status = OrganizationStatus.DELETION_IN_PROGRESS + self.existing_org.save() + + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = response = self.get_error_response(self.existing_org.slug, status_code=404) + + assert response.data.get("detail") is not None + assert response.data.get("detail") == ERR_ORGANIZATION_NOT_FOUND.substitute( + pointer=self.existing_org.slug + ) + assert uploading_start_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count + + 
@override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + # Note that for this test we've changed this to `EXPORTING_TEST_REGION` + @assume_test_silo_mode(SiloMode.REGION, region_name=EXPORTING_TEST_REGION) + def test_bad_organization_already_in_region( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = response = self.get_error_response(self.existing_org.slug, status_code=400) + + assert response.data.get("detail") is not None + assert response.data.get("detail") == ERR_CANNOT_FORK_INTO_SAME_REGION.substitute( + region=EXPORTING_TEST_REGION, + ) + assert uploading_start_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count + + for stat in [ + Relocation.Status.SUCCESS, + Relocation.Status.FAILURE, + ]: + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_completed_relocation_for_same_organization( + self, + uploading_start_mock: Mock, + stat=stat, + ): + self.login_as(user=self.superuser, superuser=True) + Relocation.objects.create( + creator_id=self.superuser.id, + owner_id=self.existing_org_owner.id, + want_org_slugs=[self.existing_org.slug], + status=stat.value, + step=Relocation.Step.COMPLETED.value, + provenance=Relocation.Provenance.SAAS_TO_SAAS.value, + ) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.slug) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == 
Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + + relocation: Relocation = Relocation.objects.get( + owner_id=self.existing_org_owner.id, status=Relocation.Status.IN_PROGRESS.value + ) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert Relocation.objects.count() == relocation_count + 1 + assert RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + for stat in [ + Relocation.Status.IN_PROGRESS, + Relocation.Status.PAUSE, + ]: + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_active_relocation_for_same_organization( + self, + uploading_start_mock: Mock, + stat=stat, + ): + self.login_as(user=self.superuser, superuser=True) + existing_relocation = Relocation.objects.create( + creator_id=self.superuser.id, + owner_id=self.existing_org_owner.id, + want_org_slugs=[self.existing_org.slug], + status=stat.value, + step=Relocation.Step.UPLOADING.value, + provenance=Relocation.Provenance.SAAS_TO_SAAS.value, + ) + + response = self.get_error_response(self.existing_org.slug, status_code=409) + + assert response.data.get("detail") is not None + assert response.data.get("detail") == ERR_DUPLICATE_ORGANIZATION_FORK.substitute( + uuid=str(existing_relocation.uuid) + ) + assert uploading_start_mock.call_count == 0 + + @override_options( + {"relocation.enabled": True, "relocation.daily-limit.small": 1, "staff.ga-rollout": True} + ) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) 
+ def test_good_no_throttle_for_staff( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.staff_user, staff=True) + Relocation.objects.create( + creator_id=self.superuser.id, + owner_id=self.existing_org_owner.id, + want_org_slugs=["some-other-org"], + status=Relocation.Status.SUCCESS.value, + step=Relocation.Step.COMPLETED.value, + provenance=Relocation.Provenance.SAAS_TO_SAAS.value, + ) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.slug) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + + relocation: Relocation = Relocation.objects.get( + owner_id=self.existing_org_owner.id, status=Relocation.Status.IN_PROGRESS.value + ) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert Relocation.objects.count() == relocation_count + 1 + assert RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_good_no_throttle_for_superuser( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=True) + Relocation.objects.create( + creator_id=self.superuser.id, + owner_id=self.existing_org_owner.id, + want_org_slugs=["some-other-org"], + status=Relocation.Status.SUCCESS.value, + step=Relocation.Step.COMPLETED.value, + 
provenance=Relocation.Provenance.SAAS_TO_SAAS.value, + ) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + response = self.get_success_response(self.existing_org.slug) + + assert response.data["status"] == Relocation.Status.IN_PROGRESS.name + assert response.data["step"] == Relocation.Step.UPLOADING.name + assert response.data["provenance"] == Relocation.Provenance.SAAS_TO_SAAS.name + + relocation: Relocation = Relocation.objects.get( + owner_id=self.existing_org_owner.id, status=Relocation.Status.IN_PROGRESS.value + ) + assert str(relocation.uuid) == response.data["uuid"] + assert relocation.want_org_slugs == [self.requested_org_slug] + assert Relocation.objects.count() == relocation_count + 1 + assert RelocationFile.objects.count() == relocation_file_count + + assert uploading_start_mock.call_count == 1 + uploading_start_mock.assert_called_with( + args=[UUID(response.data["uuid"]), EXPORTING_TEST_REGION, self.requested_org_slug] + ) + + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_without_superuser_or_staff( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.existing_org_owner, superuser=False, staff=False) + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + self.get_error_response(self.existing_org.slug, status_code=403) + + assert uploading_start_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count + + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_superuser_not_active( + self, + uploading_start_mock: Mock, + ): + self.login_as(user=self.superuser, superuser=False) + relocation_count = Relocation.objects.count() + 
relocation_file_count = RelocationFile.objects.count() + + self.get_error_response(self.existing_org.slug, status_code=403) + + assert uploading_start_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count + + @override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1}) + @patch("sentry.tasks.relocation.uploading_start.apply_async") + @assume_test_silo_mode(SiloMode.REGION, region_name=REQUESTING_TEST_REGION) + def test_bad_no_auth( + self, + uploading_start_mock: Mock, + ): + relocation_count = Relocation.objects.count() + relocation_file_count = RelocationFile.objects.count() + + self.get_error_response(self.existing_org.slug, status_code=401) + + assert uploading_start_mock.call_count == 0 + assert Relocation.objects.count() == relocation_count + assert RelocationFile.objects.count() == relocation_file_count