feat(relocation): Add organization-fork endpoint (#74397)
This endpoint triggers a SAAS->SAAS relocation. Since this relocation is
really just a duplication of an existing org in another region (aka a
"fork"), I've decided it made more sense to list it under the org slug of
the org being exported, rather than under the `/relocation/` path.
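
A rough sketch of how a client might call the new endpoint once deployed; the
host, token, and org slug below are placeholders, and the path assumes the
usual `/api/0/organizations/` prefix for organization routes:

    # Hypothetical invocation; host, token, and org slug are placeholders.
    import requests

    REGION_HOST = "https://us.sentry.io"  # target region: the fork is created HERE
    ORG = "my-org"  # id or slug of the organization to fork

    response = requests.post(
        f"{REGION_HOST}/api/0/organizations/{ORG}/fork/",
        headers={"Authorization": "Bearer <superuser-or-staff-token>"},
    )
    # 201 with the serialized Relocation on success; 400/404/409 on the error
    # paths handled in organization_fork.py below.
    print(response.status_code, response.json())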
azaslavsky committed Jul 18, 2024
1 parent ec7d531 commit 82db2fc
Showing 5 changed files with 658 additions and 11 deletions.
158 changes: 158 additions & 0 deletions src/sentry/api/endpoints/organization_fork.py
@@ -0,0 +1,158 @@
import logging
from string import Template

from django.db import router
from rest_framework import status
from rest_framework.request import Request
from rest_framework.response import Response

from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import Endpoint, region_silo_endpoint
from sentry.api.endpoints.relocations.index import (
get_autopause_value,
validate_relocation_uniqueness,
)
from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission
from sentry.api.serializers import serialize
from sentry.hybridcloud.services.organization_mapping import organization_mapping_service
from sentry.models.relocation import Relocation
from sentry.organizations.services.organization import organization_service
from sentry.tasks.relocation import uploading_start
from sentry.types.region import get_local_region
from sentry.utils.db import atomic_transaction

ERR_DUPLICATE_ORGANIZATION_FORK = Template(
"This organization is already in the process of being forked, relocation id: $uuid"
)
ERR_ORGANIZATION_NOT_FOUND = Template("The target organization `$pointer` could not be found.")
ERR_ORGANIZATION_MAPPING_NOT_FOUND = Template(
"The target organization `$slug` has no region mapping."
)
ERR_CANNOT_FORK_INTO_SAME_REGION = Template(
"The organization already lives in region `$region`, so it cannot be forked into that region."
)

logger = logging.getLogger(__name__)


@region_silo_endpoint
class OrganizationForkEndpoint(Endpoint):
owner = ApiOwner.OPEN_SOURCE
publish_status = {
"POST": ApiPublishStatus.EXPERIMENTAL,
}
permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,)

def post(self, request: Request, organization_id_or_slug) -> Response:
"""
Duplicate an organization across regions. The old organization remains untouched. We kick
off the standard `SAAS_TO_SAAS` relocation flow to create a duplicate in this region.
Because each region silo of the API has its own version of this endpoint, we assume that the
target region for the fork is the owning region of the API on which this was called. For
example, if we call this endpoint at `us.sentry.io`, we are implicitly saying we would like
the target organization forked INTO the `us` region.
``````````````````````````````````````````````````
:pparam string organization_id_or_slug: the id or slug of the organization
:auth: required
"""

logger.info("relocations.fork.post.start", extra={"caller": request.user.id})

org_retrieval_args = {
"only_visible": True,
"include_projects": False,
"include_teams": False,
}
org_context = (
organization_service.get_organization_by_id(id=organization_id_or_slug)
if str(organization_id_or_slug).isdecimal()
else organization_service.get_organization_by_slug(
slug=organization_id_or_slug, **org_retrieval_args
)
)
if not org_context:
return Response(
{
"detail": ERR_ORGANIZATION_NOT_FOUND.substitute(
pointer=organization_id_or_slug,
)
},
status=status.HTTP_404_NOT_FOUND,
)

organization = org_context.organization
org_slug = organization.slug
org_mapping = organization_mapping_service.get(organization_id=organization.id)
if not org_mapping:
return Response(
{
"detail": ERR_ORGANIZATION_NOT_FOUND.substitute(
slug=org_slug,
)
},
status=status.HTTP_404_NOT_FOUND,
)

# Figure out which region the organization being forked lives in.
requesting_region_name = get_local_region().name
replying_region_name = org_mapping.region_name
if replying_region_name == requesting_region_name:
return Response(
{
"detail": ERR_CANNOT_FORK_INTO_SAME_REGION.substitute(
region=requesting_region_name,
)
},
status=status.HTTP_400_BAD_REQUEST,
)

# If there is already an in-progress relocation into this region for this org, block
# this one until that one resolves.
duplicate_relocation = Relocation.objects.filter(
provenance=Relocation.Provenance.SAAS_TO_SAAS.value,
want_org_slugs=[organization.slug],
status__in={Relocation.Status.IN_PROGRESS.value, Relocation.Status.PAUSE.value},
).first()
if duplicate_relocation is not None:
return Response(
{
"detail": ERR_DUPLICATE_ORGANIZATION_FORK.substitute(
uuid=duplicate_relocation.uuid
)
},
status=status.HTTP_409_CONFLICT,
)

# Identify who will be the owner of the newly forked organization, and ensure that they
# don't already have relocations in flight.
owners = organization.get_owners()
owner = owners[0] if len(owners) > 0 else request.user
err = validate_relocation_uniqueness(owner)
if err is not None:
return err

# We do not create a `RelocationFile` yet. Instead, we trigger a series of RPC calls (via
# `uploading_start`, scheduled below) to create an export of the organization we are seeking
# to duplicate from the foreign region.
with atomic_transaction(using=(router.db_for_write(Relocation))):
new_relocation: Relocation = Relocation.objects.create(
creator_id=request.user.id,
owner_id=owner.id,
step=Relocation.Step.UPLOADING.value,
scheduled_pause_at_step=get_autopause_value(),
provenance=Relocation.Provenance.SAAS_TO_SAAS,
want_org_slugs=[organization.slug],
)

# Kick off the asynchronous process of exporting the relocation from the partner region.
# When we receive this back (via RPC call), we'll be able to continue with the usual
# relocation flow, picking up from the `uploading_complete` task.
uploading_start.apply_async(
args=[new_relocation.uuid, replying_region_name, organization.slug]
)

return Response(serialize(new_relocation), status=status.HTTP_201_CREATED)
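
The error strings above use `string.Template`, whose `substitute` requires
every `$placeholder` declared in the template to be passed by keyword; a
minimal stdlib sketch of the pattern (values are illustrative):

    from string import Template

    ERR_CANNOT_FORK_INTO_SAME_REGION = Template(
        "The organization already lives in region `$region`, so it cannot be forked into that region."
    )

    # Placeholder names must match the call site exactly.
    print(ERR_CANNOT_FORK_INTO_SAME_REGION.substitute(region="us"))

    # A mismatched keyword raises KeyError, which is why each template
    # ($pointer vs. $slug vs. $uuid) is paired with the right substitute() call.
    try:
        ERR_CANNOT_FORK_INTO_SAME_REGION.substitute(area="us")
    except KeyError as err:
        print("missing placeholder:", err)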
7 changes: 4 additions & 3 deletions src/sentry/api/endpoints/relocations/index.py
@@ -4,6 +4,7 @@
from functools import reduce
from string import Template

from django.contrib.auth.models import AnonymousUser
from django.db import router
from django.db.models import Q
from django.utils import timezone
@@ -24,7 +25,7 @@
from sentry.auth.elevated_mode import has_elevated_mode
from sentry.models.files.file import File
from sentry.models.relocation import Relocation, RelocationFile
from sentry.models.user import MAX_USERNAME_LENGTH
from sentry.models.user import MAX_USERNAME_LENGTH, User
from sentry.options import get
from sentry.search.utils import tokenize_query
from sentry.signals import relocation_link_promo_code
@@ -128,7 +129,7 @@ def validate_new_relocation_request(
return None


def validate_relocation_uniqueness(owner: RpcUser) -> Response | None:
def validate_relocation_uniqueness(owner: RpcUser | AnonymousUser | User) -> Response | None:
# Check that this `owner` does not have more than one active `Relocation` in flight.
if Relocation.objects.filter(
owner_id=owner.id,
@@ -277,7 +278,7 @@ def post(self, request: Request) -> Response:
relocation_link_promo_code.send_robust(
relocation_uuid=relocation.uuid, promo_code=promo_code, sender=self.__class__
)
uploading_start.delay(relocation.uuid, None, None)
uploading_start.apply_async(args=[relocation.uuid, None, None])
try:
analytics.record(
"relocation.created",
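
The switch from `uploading_start.delay(...)` to
`uploading_start.apply_async(args=[...])` is behavior-preserving: in Celery,
`delay(*args)` is shorthand for `apply_async(args=args)`. The explicit form
mirrors the keyword-style assertions added to the tests below. A toy sketch,
with a placeholder app and broker:

    # Toy illustration of delay vs. apply_async; app and broker are placeholders.
    from celery import Celery

    app = Celery("example", broker="memory://")

    @app.task
    def uploading_start(uuid, replying_region_name, org_slug):
        return (uuid, replying_region_name, org_slug)

    # These two calls enqueue identical task messages:
    uploading_start.delay("some-uuid", None, None)
    uploading_start.apply_async(args=["some-uuid", None, None])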
7 changes: 7 additions & 0 deletions src/sentry/api/urls.py
@@ -14,6 +14,7 @@
from sentry.api.endpoints.organization_events_root_cause_analysis import (
OrganizationEventsRootCauseAnalysisEndpoint,
)
from sentry.api.endpoints.organization_fork import OrganizationForkEndpoint
from sentry.api.endpoints.organization_integration_migrate_opsgenie import (
OrganizationIntegrationMigrateOpsgenieEndpoint,
)
@@ -2168,6 +2169,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
OrganizationRegionEndpoint.as_view(),
name="sentry-api-0-organization-region",
),
# Trigger relocation
re_path(
r"^(?P<organization_id_or_slug>[^\/]+)/fork/$",
OrganizationForkEndpoint.as_view(),
name="sentry-api-0-organization-fork",
),
]

PROJECT_URLS: list[URLPattern | URLResolver] = [
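
The new route's capture group excludes slashes, so both numeric IDs and slugs
reach `OrganizationForkEndpoint`, which then disambiguates with
`isdecimal()`. A quick check of the pattern using the stdlib `re` module:

    import re

    # The pattern registered in urls.py above.
    pattern = re.compile(r"^(?P<organization_id_or_slug>[^\/]+)/fork/$")

    for path in ("my-org/fork/", "12345/fork/", "my/org/fork/"):
        m = pattern.match(path)
        print(path, "->", m.group("organization_id_or_slug") if m else "no match")
    # my-org/fork/ -> my-org
    # 12345/fork/ -> 12345
    # my/org/fork/ -> no match (slashes cannot appear in the capture)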
24 changes: 16 additions & 8 deletions tests/sentry/api/endpoints/relocations/test_index.py
@@ -305,7 +305,7 @@ def tmp_keys(self, tmp_dir: str) -> tuple[Path, Path]:
return (tmp_priv_key_path, tmp_pub_key_path)

@override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1})
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_simple(
self,
uploading_start_mock: Mock,
@@ -351,6 +351,7 @@ def test_good_simple(
assert RelocationFile.objects.count() == relocation_file_count + 1

assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
@@ -368,7 +369,7 @@
)

@override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1})
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_promo_code(
self,
uploading_start_mock: Mock,
@@ -415,6 +416,7 @@
assert RelocationFile.objects.count() == relocation_file_count + 1

assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
@@ -438,7 +440,7 @@
"relocation.autopause": "IMPORTING",
}
)
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_with_valid_autopause_option(
self,
uploading_start_mock: Mock,
@@ -469,6 +471,7 @@
assert response.data["scheduledPauseAtStep"] == Relocation.Step.IMPORTING.name

assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
@@ -492,7 +495,7 @@
"relocation.autopause": "DOESNOTEXIST",
}
)
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_with_invalid_autopause_option(
self,
uploading_start_mock: Mock,
@@ -523,6 +526,8 @@
assert response.data["scheduledPauseAtStep"] is None

assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
"relocation.created",
@@ -541,7 +546,7 @@
@override_options(
{"relocation.enabled": False, "relocation.daily-limit.small": 1, "staff.ga-rollout": True}
)
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_staff_when_feature_disabled(
self,
uploading_start_mock: Mock,
@@ -585,6 +590,7 @@
assert RelocationFile.objects.count() == relocation_file_count + 1

assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
@@ -602,7 +608,7 @@
)

@override_options({"relocation.enabled": False, "relocation.daily-limit.small": 1})
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_superuser_when_feature_disabled(
self,
uploading_start_mock: Mock,
@@ -646,6 +652,7 @@
assert RelocationFile.objects.count() == relocation_file_count + 1

assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
@@ -725,7 +732,7 @@ def test_bad_expired_superuser_when_feature_disabled(
]:

@override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1})
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_good_valid_org_slugs(
self,
uploading_start_mock: Mock,
@@ -761,6 +768,7 @@ def test_good_valid_org_slugs(
assert RelocationFile.objects.count() == relocation_file_count + 1
assert Relocation.objects.get(owner_id=self.owner.id).want_org_slugs == expected
assert uploading_start_mock.call_count == 1
uploading_start_mock.assert_called_with(args=[UUID(response.data["uuid"]), None, None])

assert analytics_record_mock.call_count == 1
analytics_record_mock.assert_called_with(
@@ -785,7 +793,7 @@
]:

@override_options({"relocation.enabled": True, "relocation.daily-limit.small": 1})
@patch("sentry.tasks.relocation.uploading_start.delay")
@patch("sentry.tasks.relocation.uploading_start.apply_async")
def test_bad_invalid_org_slugs(
self,
analytics_record_mock: Mock,
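
The tests now patch `apply_async` rather than `delay` and assert on the exact
`args` list. The assertion pattern is plain `unittest.mock`; a self-contained
sketch with a stand-in for the patched task:

    from unittest.mock import MagicMock
    from uuid import uuid4

    # Stand-in for the patched uploading_start task.
    uploading_start_mock = MagicMock()

    relocation_uuid = uuid4()
    uploading_start_mock.apply_async(args=[relocation_uuid, None, None])

    # Mirrors the new assertions: exactly one scheduling call, made with the
    # relocation UUID plus two None placeholders.
    assert uploading_start_mock.apply_async.call_count == 1
    uploading_start_mock.apply_async.assert_called_with(args=[relocation_uuid, None, None])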