From 4577593b1c6e6e50ec5a5b29cfda517d1ae8a11d Mon Sep 17 00:00:00 2001
From: Alex Zaslavsky
Date: Wed, 6 Sep 2023 15:22:27 -0700
Subject: [PATCH] feat(backup): Generate new OrgAuthToken on import

Instead of making a duplicate of the existing token when we copy an org
across regions, which could cause problems for downstream code that
relies on this uniqueness, we simply generate a new token value. This
will break operations that rely on this token as users move from the
old org to the new one, but this is an acceptable price to pay to
preserve uniqueness.

Issue: getsentry/team-ospo#193
---
 src/sentry/models/orgauthtoken.py       | 45 ++++++++++++++-
 src/sentry/models/projectkey.py         |  3 -
 src/sentry/testutils/helpers/backups.py | 19 ++++---
 tests/sentry/backup/test_coverage.py    |  4 +-
 tests/sentry/backup/test_exhaustive.py  | 76 ++++++++++++++++++++++---
 tests/sentry/backup/test_imports.py     | 60 +++++++++++++++----
 6 files changed, 174 insertions(+), 33 deletions(-)

diff --git a/src/sentry/models/orgauthtoken.py b/src/sentry/models/orgauthtoken.py
index 4ef86e84189f2c..d0012af82c60e2 100644
--- a/src/sentry/models/orgauthtoken.py
+++ b/src/sentry/models/orgauthtoken.py
@@ -1,11 +1,16 @@
 from __future__ import annotations
 
+from typing import Optional, Tuple
+
 from django.core.exceptions import ValidationError
 from django.db import models
+from django.forms import model_to_dict
 from django.utils import timezone
 from django.utils.encoding import force_str
 
-from sentry.backup.scopes import RelocationScope
+from sentry.backup.dependencies import ImportKind, PrimaryKeyMap
+from sentry.backup.helpers import ImportFlags
+from sentry.backup.scopes import ImportScope, RelocationScope
 from sentry.conf.server import SENTRY_SCOPES
 from sentry.db.models import (
     ArrayField,
@@ -16,6 +21,7 @@
     sane_repr,
 )
 from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
+from sentry.services.hybrid_cloud.organization import organization_service
 from sentry.services.hybrid_cloud.orgauthtoken import orgauthtoken_service
 
 MAX_NAME_LENGTH = 255
@@ -76,6 +82,43 @@ def has_scope(self, scope):
     def is_active(self) -> bool:
         return self.date_deactivated is None
 
+    def write_relocation_import(
+        self, pk_map: PrimaryKeyMap, scope: ImportScope, flags: ImportFlags
+    ) -> Optional[Tuple[int, int, ImportKind]]:
+        # TODO(getsentry/team-ospo#190): Prevents a circular import; could probably split up the
+        # source module in such a way that this is no longer an issue.
+        from sentry.api.utils import generate_region_url
+        from sentry.utils.security.orgauthtoken_token import generate_token, hash_token
+
+        old_pk = super()._normalize_before_relocation_import(pk_map, scope, flags)
+        if old_pk is None:
+            return None
+
+        # If there is a token collision, or the token does not exist for some reason, generate a new
+        # one.
+        matching_token_hashed = self.__class__.objects.filter(
+            token_hashed=self.token_hashed
+        ).first()
+        if (not self.token_hashed) or matching_token_hashed:
+            org_context = organization_service.get_organization_by_id(id=self.organization_id)
+            if org_context is None:
+                return None
+
+            token_str = generate_token(org_context.organization.slug, generate_region_url())
+            self.token_hashed = hash_token(token_str)
+            self.token_last_characters = token_str[-4:]
+
+        (key, created) = OrgAuthToken.objects.get_or_create(
+            token_hashed=self.token_hashed,
+            token_last_characters=self.token_last_characters,
+            defaults=model_to_dict(self),
+        )
+        if key:
+            self.pk = key.pk
+            self.save()
+
+        return (old_pk, self.pk, ImportKind.Inserted if created else ImportKind.Existing)
+
 
 def is_org_auth_token_auth(auth: object) -> bool:
     """:returns True when an API token is hitting the API."""
diff --git a/src/sentry/models/projectkey.py b/src/sentry/models/projectkey.py
index 27505553bb94fd..e0ceba7310d79f 100644
--- a/src/sentry/models/projectkey.py
+++ b/src/sentry/models/projectkey.py
@@ -288,9 +288,6 @@ def write_relocation_import(
             return None
 
         # If there is a key collision, generate new keys.
-        #
-        # TODO(getsentry/team-ospo#190): Is this the right behavior? Another option is to just
-        # update the SQL schema to no longer require uniqueness.
         matching_public_key = self.__class__.objects.filter(public_key=self.public_key).first()
         if not self.public_key or matching_public_key:
             self.public_key = self.generate_api_key()
diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py
index 666e7db5b32cce..66784270991d05 100644
--- a/src/sentry/testutils/helpers/backups.py
+++ b/src/sentry/testutils/helpers/backups.py
@@ -262,14 +262,6 @@ def create_exhaustive_organization(
         )
 
         # Auth*
-        OrgAuthToken.objects.create(
-            organization_id=org.id,
-            name=f"token 1 for {slug}",
-            token_hashed=f"ABCDEF{slug}",
-            token_last_characters="xyz1",
-            scope_list=["org:ci"],
-            date_last_used=None,
-        )
         ApiKey.objects.create(key=uuid4().hex, organization_id=org.id)
         auth_provider = AuthProvider.objects.create(organization_id=org.id, provider="sentry")
         AuthIdentity.objects.create(
@@ -298,6 +290,17 @@ def create_exhaustive_organization(
         )
         ProjectRedirect.record(project, f"project_slug_in_{slug}")
 
+        # OrgAuthToken
+        OrgAuthToken.objects.create(
+            organization_id=org.id,
+            name=f"token 1 for {slug}",
+            token_hashed=f"ABCDEF{slug}",
+            token_last_characters="xyz1",
+            scope_list=["org:ci"],
+            date_last_used=None,
+            project_last_used_id=project.id,
+        )
+
         # Integration*
         integration = Integration.objects.create(
             provider="slack", name=f"Slack for {slug}", external_id=f"slack:{slug}"
diff --git a/tests/sentry/backup/test_coverage.py b/tests/sentry/backup/test_coverage.py
index 2516d94418e88b..22592ec04ca698 100644
--- a/tests/sentry/backup/test_coverage.py
+++ b/tests/sentry/backup/test_coverage.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from sentry.backup.helpers import get_exportable_sentry_models
-from tests.sentry.backup.test_exhaustive import RELEASE_TESTED_MODELS
+from tests.sentry.backup.test_exhaustive import EXHAUSTIVELY_TESTED_MODELS
 from tests.sentry.backup.test_models import UNIT_TESTED_MODELS
 
 ALL_EXPORTABLE_MODELS = {c.__name__ for c in get_exportable_sentry_models()}
@@ -13,5 +13,5 @@ def test_exportable_final_derivations_of_sentry_model_are_unit_tested():
 
 
 def test_exportable_final_derivations_of_sentry_model_are_exhaustively_tested():
-    untested = ALL_EXPORTABLE_MODELS - RELEASE_TESTED_MODELS
+    untested = ALL_EXPORTABLE_MODELS - EXHAUSTIVELY_TESTED_MODELS
     assert not untested
diff --git a/tests/sentry/backup/test_exhaustive.py b/tests/sentry/backup/test_exhaustive.py
index 78b4c70aa26305..67f2f0c290197a 100644
--- a/tests/sentry/backup/test_exhaustive.py
+++ b/tests/sentry/backup/test_exhaustive.py
@@ -1,12 +1,21 @@
 from __future__ import annotations
 
+import tempfile
+from pathlib import Path
 from typing import Literal, Type
 
 from sentry.backup.helpers import get_exportable_sentry_models
-from sentry.testutils.helpers.backups import BackupTestCase
+from sentry.backup.imports import import_in_global_scope, import_in_organization_scope
+from sentry.backup.scopes import ExportScope
+from sentry.testutils.helpers.backups import (
+    NOOP_PRINTER,
+    BackupTestCase,
+    clear_database,
+    export_to_file,
+)
 from tests.sentry.backup import run_backup_tests_only_on_single_db, targets
 
-RELEASE_TESTED_MODELS = set()
+EXHAUSTIVELY_TESTED_MODELS = set()
 
 
 def mark(*marking: Type | Literal["__all__"]):
@@ -19,23 +28,74 @@ def mark(*marking: Type | Literal["__all__"]):
     for model in marking:
         if model == all:
             all_models = get_exportable_sentry_models()
-            RELEASE_TESTED_MODELS.update({c.__name__ for c in all_models})
+            EXHAUSTIVELY_TESTED_MODELS.update({c.__name__ for c in all_models})
             return list(all_models)
 
-        RELEASE_TESTED_MODELS.add(model.__name__)
+        EXHAUSTIVELY_TESTED_MODELS.add(model.__name__)
 
     return marking
 
 
 @run_backup_tests_only_on_single_db
-class ReleaseTests(BackupTestCase):
-    """Ensure that the all Sentry models are still exportable."""
+class ExhaustiveTests(BackupTestCase):
+    """Ensure that a database with all exportable models filled out still works."""
 
     @targets(mark("__all__"))
-    def test_at_head_clean_pks(self):
+    def test_exhaustive_clean_pks(self):
         self.create_exhaustive_instance(is_superadmin=True)
         return self.import_export_then_validate(self._testMethodName, reset_pks=True)
 
     @targets(mark("__all__"))
-    def test_at_head_dirty_pks(self):
+    def test_exhaustive_dirty_pks(self):
         self.create_exhaustive_instance(is_superadmin=True)
         return self.import_export_then_validate(self._testMethodName, reset_pks=False)
+
+
+@run_backup_tests_only_on_single_db
+class UniquenessTests(BackupTestCase):
+    """Ensure that required uniqueness (i.e., model fields marked `unique=True`) is honored."""
+
+    def export_to_tmp_file_and_clear_database(self, tmp_dir, reset_pks) -> Path:
+        tmp_path = Path(tmp_dir).joinpath(f"{self._testMethodName}.expect.json")
+        export_to_file(tmp_path, ExportScope.Global)
+        clear_database(reset_pks=reset_pks)
+        return tmp_path
+
+    @targets(mark("__all__"))
+    def test_uniqueness_clean_pks(self):
+        self.create_exhaustive_instance(is_superadmin=True)
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            tmp_actual = Path(tmp_dir).joinpath(f"{self._testMethodName}.actual.json")
+            tmp_expect = self.export_to_tmp_file_and_clear_database(tmp_dir, True)
+
+            # Now import twice, so that all random values in the export (UUIDs, etc.) are
+            # identical, to test that these are properly replaced and handled.
+            with open(tmp_expect) as tmp_file:
+                import_in_global_scope(tmp_file, printer=NOOP_PRINTER)
+            with open(tmp_expect) as tmp_file:
+                # Back-to-back global scope imports are disallowed (global scope assumes a clean
+                # database), so use organization scope instead.
+                import_in_organization_scope(tmp_file, printer=NOOP_PRINTER)
+
+            actual = export_to_file(tmp_actual, ExportScope.Global)
+
+            return actual
+
+    @targets(mark("__all__"))
+    def test_uniqueness_dirty_pks(self):
+        self.create_exhaustive_instance(is_superadmin=True)
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            tmp_actual = Path(tmp_dir).joinpath(f"{self._testMethodName}.actual.json")
+            tmp_expect = self.export_to_tmp_file_and_clear_database(tmp_dir, False)
+
+            # Now import twice, so that all random values in the export (UUIDs, etc.) are
+            # identical, to test that these are properly replaced and handled.
+            with open(tmp_expect) as tmp_file:
+                import_in_global_scope(tmp_file, printer=NOOP_PRINTER)
+            with open(tmp_expect) as tmp_file:
+                # Back-to-back global scope imports are disallowed (global scope assumes a clean
+                # database), so use organization scope instead.
+                import_in_organization_scope(tmp_file, printer=NOOP_PRINTER)
+
+            actual = export_to_file(tmp_actual, ExportScope.Global)
+
+            return actual
diff --git a/tests/sentry/backup/test_imports.py b/tests/sentry/backup/test_imports.py
index 98712527382c8e..971cb5277171fd 100644
--- a/tests/sentry/backup/test_imports.py
+++ b/tests/sentry/backup/test_imports.py
@@ -623,6 +623,47 @@ class CollisionTests(ImportTestCase):
     Ensure that collisions are properly handled in different flag modes.
     """
 
+    def test_colliding_org_auth_token(self):
+        owner = self.create_exhaustive_user("owner")
+        invited = self.create_exhaustive_user("invited")
+        member = self.create_exhaustive_user("member")
+        self.create_exhaustive_organization("some-org", owner, invited, [member])
+
+        # Take note of the `OrgAuthToken` that was created by the exhaustive organization - this is
+        # the one we'll be importing.
+        colliding = OrgAuthToken.objects.filter().first()
+
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            tmp_path = self.export_to_tmp_file_and_clear_database(tmp_dir)
+
+            # After exporting and clearing the database, insert a copy of the same `OrgAuthToken`
+            # as the one found in the import.
+            org = self.create_organization()
+            colliding.organization_id = org.id
+            colliding.project_last_used_id = self.create_project(organization=org).id
+            colliding.save()
+
+            assert OrgAuthToken.objects.count() == 1
+            assert OrgAuthToken.objects.filter(token_hashed=colliding.token_hashed).count() == 1
+            assert (
+                OrgAuthToken.objects.filter(
+                    token_last_characters=colliding.token_last_characters
+                ).count()
+                == 1
+            )
+
+            with open(tmp_path) as tmp_file:
+                import_in_organization_scope(tmp_file, printer=NOOP_PRINTER)
+
+            assert OrgAuthToken.objects.count() == 2
+            assert OrgAuthToken.objects.filter(token_hashed=colliding.token_hashed).count() == 1
+            assert (
+                OrgAuthToken.objects.filter(
+                    token_last_characters=colliding.token_last_characters
+                ).count()
+                == 1
+            )
+
     def test_colliding_project_key(self):
         owner = self.create_exhaustive_user("owner")
         invited = self.create_exhaustive_user("invited")
@@ -632,28 +673,25 @@ def test_colliding_project_key(self):
         # Take note of the `ProjectKey` that was created by the exhaustive organization - this is
         # the one we'll be importing.
         colliding = ProjectKey.objects.filter().first()
-        colliding_public_key = colliding.public_key
-        colliding_secret_key = colliding.secret_key
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             tmp_path = self.export_to_tmp_file_and_clear_database(tmp_dir)
 
             # After exporting and clearing the database, insert a copy of the same `ProjectKey` as
             # the one found in the import.
- project = self.create_project() - ProjectKey.objects.create( - project=project, - label="Test", - public_key=colliding_public_key, - secret_key=colliding_secret_key, - ) + colliding.project = self.create_project() + colliding.save() + + assert ProjectKey.objects.count() < 4 + assert ProjectKey.objects.filter(public_key=colliding.public_key).count() == 1 + assert ProjectKey.objects.filter(secret_key=colliding.secret_key).count() == 1 with open(tmp_path) as tmp_file: import_in_organization_scope(tmp_file, printer=NOOP_PRINTER) assert ProjectKey.objects.count() == 4 - assert ProjectKey.objects.filter(public_key=colliding_public_key).count() == 1 - assert ProjectKey.objects.filter(secret_key=colliding_secret_key).count() == 1 + assert ProjectKey.objects.filter(public_key=colliding.public_key).count() == 1 + assert ProjectKey.objects.filter(secret_key=colliding.secret_key).count() == 1 def test_colliding_user_with_merging_enabled_in_user_scope(self): self.create_exhaustive_user(username="owner", email="owner@example.com")
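
For reviewers who want to poke at the collision-handling flow without a full Sentry checkout, here is a minimal sketch of the pattern `write_relocation_import` applies above, reduced to self-contained Python. It is an approximation, not the shipped code: `FakeTokenStore` is a hypothetical stand-in for the `OrgAuthToken` table, and the `generate_token`/`hash_token` functions below are simplified stand-ins for the real helpers in `sentry.utils.security.orgauthtoken_token`.

import hashlib
import secrets
from typing import Optional, Tuple


class FakeTokenStore:
    """Hypothetical stand-in for the OrgAuthToken table; token_hashed is unique."""

    def __init__(self) -> None:
        self._pk_by_hash: dict[str, int] = {}
        self._next_pk = 1

    def find(self, token_hashed: str) -> Optional[int]:
        return self._pk_by_hash.get(token_hashed)

    def insert(self, token_hashed: str) -> int:
        pk = self._next_pk
        self._next_pk += 1
        self._pk_by_hash[token_hashed] = pk
        return pk


def generate_token(org_slug: str, region_url: str) -> str:
    # Simplified stand-in for Sentry's token generator: any fresh random value works here.
    return f"sntrys_{org_slug}_{secrets.token_urlsafe(24)}"


def hash_token(token_str: str) -> str:
    return hashlib.sha256(token_str.encode()).hexdigest()


def import_org_auth_token(
    store: FakeTokenStore, org_slug: str, region_url: str, token_hashed: Optional[str]
) -> Tuple[int, str]:
    # Mirrors the patch's flow: if the incoming hash is missing or already present,
    # mint a brand-new token rather than duplicating the old one, then upsert.
    if not token_hashed or store.find(token_hashed) is not None:
        token_str = generate_token(org_slug, region_url)
        token_hashed = hash_token(token_str)
    existing = store.find(token_hashed)
    pk = existing if existing is not None else store.insert(token_hashed)
    return pk, token_hashed


# Importing the same export twice yields two rows with distinct hashes.
store = FakeTokenStore()
exported_hash = hash_token("token-from-export")
pk1, hash1 = import_org_auth_token(store, "some-org", "https://us.example.com", exported_hash)
pk2, hash2 = import_org_auth_token(store, "some-org", "https://us.example.com", exported_hash)
assert pk1 != pk2 and hash1 != hash2

The property that the new `test_colliding_org_auth_token` checks falls out directly: importing the same export twice never writes the same `token_hashed` twice.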