🛠️ apply pre-commit fixes
getsantry[bot] authored Aug 22, 2024
1 parent 8e8b54b commit 683900b
Showing 5 changed files with 361 additions and 449 deletions.
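The hunks below are mechanical formatting changes: import blocks are re-sorted, and parenthesized wrappings that now fit within the configured line length are collapsed onto a single line. The repository's actual pre-commit hook configuration is not part of this diff, so the following is only a rough sketch, assuming isort- and black-style hooks with a 100-character line limit, of how the same kind of rewrite can be reproduced:

# Minimal sketch only: the hook names, line length, and settings below are
# assumptions; the commit itself does not show the pre-commit configuration.
import black
import isort

SOURCE = '''\
import logging
import hashlib
from sentry.hybridcloud.models import (
    ApiKeyReplica,
    ApiTokenReplica,
    OrgAuthTokenReplica
)
'''

# Sort and group the imports (what an isort-style hook would do).
sorted_source = isort.code(SOURCE, profile="black", line_length=100)

# Re-wrap to the assumed 100-character limit (what a black-style hook would do);
# the parenthesized import now fits on one line and is collapsed.
formatted = black.format_str(sorted_source, mode=black.Mode(line_length=100))
print(formatted)

The printed result mirrors the single-line forms that appear in the hunks below, such as the collapsed sentry.hybridcloud.models import in the first file.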
31 changes: 10 additions & 21 deletions src/sentry/api/authentication.py
@@ -1,7 +1,7 @@
from __future__ import annotations

import logging
import hashlib
import logging
import random
from collections.abc import Callable, Iterable
from typing import Any, ClassVar
@@ -23,11 +23,7 @@
from sentry import options
from sentry.auth.services.auth import AuthenticatedToken
from sentry.auth.system import SystemToken, is_internal_ip
from sentry.hybridcloud.models import (
ApiKeyReplica,
ApiTokenReplica,
OrgAuthTokenReplica,
)
from sentry.hybridcloud.models import ApiKeyReplica, ApiTokenReplica, OrgAuthTokenReplica
from sentry.hybridcloud.rpc.service import compare_signature
from sentry.models.apiapplication import ApiApplication
from sentry.models.apikey import ApiKey
@@ -47,10 +43,7 @@
from sentry.users.services.user.service import user_service
from sentry.utils.linksign import process_signature
from sentry.utils.sdk import Scope
from sentry.utils.security.orgauthtoken_token import (
SENTRY_ORG_AUTH_TOKEN_PREFIX,
hash_token,
)
from sentry.utils.security.orgauthtoken_token import SENTRY_ORG_AUTH_TOKEN_PREFIX, hash_token

logger = logging.getLogger(__name__)

@@ -234,9 +227,7 @@ def authenticate_credentials(
raise AuthenticationFailed("Unknown relay")

try:
data = relay.public_key_object.unpack(
request.body, relay_sig, max_age=60 * 5
)
data = relay.public_key_object.unpack(request.body, relay_sig, max_age=60 * 5)
request.relay = relay
request.relay_request_data = data
except UnpackError:
@@ -342,9 +333,7 @@ class TokenStrLookupRequired(Exception):
class UserAuthTokenAuthentication(StandardAuthentication):
token_name = b"bearer"

def _find_or_update_token_by_hash(
self, token_str: str
) -> ApiToken | ApiTokenReplica:
def _find_or_update_token_by_hash(self, token_str: str) -> ApiToken | ApiTokenReplica:
"""
Find token by hash or update token's hash value if only found via plaintext.
@@ -391,9 +380,9 @@ def _find_or_update_token_by_hash(
except (ApiToken.DoesNotExist, TokenStrLookupRequired):
try:
# If we can't find it by hash, use the plaintext string
api_token = ApiToken.objects.select_related(
"user", "application"
).get(token=token_str)
api_token = ApiToken.objects.select_related("user", "application").get(
token=token_str
)
except ApiToken.DoesNotExist:
# If the token does not exist by plaintext either, it is not a valid token
raise AuthenticationFailed("Invalid token")
@@ -429,8 +418,8 @@ def authenticate_token(self, request: Request, token_str: str) -> tuple[Any, Any
logger.error(f"USER AUTH REQUEST HEADERS: {request.headers}")
user: AnonymousUser | User | RpcUser | None = AnonymousUser()

token: SystemToken | ApiTokenReplica | ApiToken | None = (
SystemToken.from_request(request, token_str)
token: SystemToken | ApiTokenReplica | ApiToken | None = SystemToken.from_request(
request, token_str
)

application_is_inactive = False
147 changes: 47 additions & 100 deletions src/sentry/api/endpoints/organization_releases.py
@@ -1,7 +1,7 @@
from __future__ import annotations

import re
import logging
import re
from datetime import datetime, timedelta

from django.db import IntegrityError
@@ -13,11 +13,7 @@

from sentry import analytics, release_health
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import (
EnvironmentMixin,
ReleaseAnalyticsMixin,
region_silo_endpoint,
)
from sentry.api.base import EnvironmentMixin, ReleaseAnalyticsMixin, region_silo_endpoint
from sentry.api.bases import NoProjects
from sentry.api.bases.organization import OrganizationReleasesBaseEndpoint
from sentry.api.exceptions import ConflictError, InvalidRepository
@@ -32,10 +28,7 @@
from sentry.api.utils import get_auth_api_token_type
from sentry.exceptions import InvalidSearchQuery
from sentry.models.activity import Activity
from sentry.models.orgauthtoken import (
is_org_auth_token_auth,
update_org_auth_token_last_used,
)
from sentry.models.orgauthtoken import is_org_auth_token_auth, update_org_auth_token_last_used
from sentry.models.project import Project
from sentry.models.release import Release, ReleaseStatus
from sentry.models.releases.exceptions import ReleaseCommitError
@@ -71,9 +64,7 @@ def get_stats_period_detail(key, choices):
def add_environment_to_queryset(queryset, filter_params):
if "environment" in filter_params:
return queryset.filter(
releaseprojectenvironment__environment__name__in=filter_params[
"environment"
],
releaseprojectenvironment__environment__name__in=filter_params["environment"],
releaseprojectenvironment__project_id__in=filter_params["project_id"],
)
return queryset
@@ -82,9 +73,7 @@ def add_environment_to_queryset(queryset, filter_params):
def add_date_filter_to_queryset(queryset, filter_params):
"""Once date has been coalesced over released and added, use it to filter releases"""
if filter_params["start"] and filter_params["end"]:
return queryset.filter(
date__gte=filter_params["start"], date__lte=filter_params["end"]
)
return queryset.filter(date__gte=filter_params["start"], date__lte=filter_params["end"])
return queryset


@@ -159,9 +148,7 @@ class ReleaseSerializerWithProjects(ReleaseWithVersionSerializer):
headCommits = ListField(
child=ReleaseHeadCommitSerializerDeprecated(), required=False, allow_null=False
)
refs = ListField(
child=ReleaseHeadCommitSerializer(), required=False, allow_null=False
)
refs = ListField(child=ReleaseHeadCommitSerializer(), required=False, allow_null=False)


def debounce_update_release_health_data(organization, project_ids: list[int]):
@@ -179,18 +166,14 @@ def debounce_update_release_health_data(organization, project_ids: list[int]):
if not should_update:
return

projects = {
p.id: p for p in Project.objects.get_many_from_cache(should_update.keys())
}
projects = {p.id: p for p in Project.objects.get_many_from_cache(should_update.keys())}

# This gives us updates for all release-projects which have seen new
# health data over the last days. It will miss releases where the last
# date is longer than what `get_changed_project_release_model_adoptions`
# considers recent.
project_releases = (
release_health.backend.get_changed_project_release_model_adoptions(
should_update.keys()
)
project_releases = release_health.backend.get_changed_project_release_model_adoptions(
should_update.keys()
)

# Check which we already have rows for.
@@ -244,18 +227,18 @@ class OrganizationReleasesEndpoint(
"GET": ApiPublishStatus.UNKNOWN,
"POST": ApiPublishStatus.UNKNOWN,
}
SESSION_SORTS = frozenset([
"crash_free_sessions",
"crash_free_users",
"sessions",
"users",
"sessions_24h",
"users_24h",
])

def get_projects(
self, request: Request, organization, project_ids=None, project_slugs=None
):
SESSION_SORTS = frozenset(
[
"crash_free_sessions",
"crash_free_users",
"sessions",
"users",
"sessions_24h",
"users_24h",
]
)

def get_projects(self, request: Request, organization, project_ids=None, project_slugs=None):
return super().get_projects(
request,
organization,
@@ -282,27 +265,19 @@ def get(self, request: Request, organization) -> Response:
sort = request.GET.get("sort") or "date"
health_stat = request.GET.get("healthStat") or "sessions"
summary_stats_period = request.GET.get("summaryStatsPeriod") or "14d"
health_stats_period = request.GET.get("healthStatsPeriod") or (
"24h" if with_health else ""
)
health_stats_period = request.GET.get("healthStatsPeriod") or ("24h" if with_health else "")
if summary_stats_period not in STATS_PERIODS:
raise ParseError(
detail=get_stats_period_detail("summaryStatsPeriod", STATS_PERIODS)
)
raise ParseError(detail=get_stats_period_detail("summaryStatsPeriod", STATS_PERIODS))
if health_stats_period and health_stats_period not in STATS_PERIODS:
raise ParseError(
detail=get_stats_period_detail("healthStatsPeriod", STATS_PERIODS)
)
raise ParseError(detail=get_stats_period_detail("healthStatsPeriod", STATS_PERIODS))
if health_stat not in ("sessions", "users"):
raise ParseError(detail="invalid healthStat")

paginator_cls = OffsetPaginator
paginator_kwargs = {}

try:
filter_params = self.get_filter_params(
request, organization, date_filter_optional=True
)
filter_params = self.get_filter_params(request, organization, date_filter_optional=True)
except NoProjects:
return Response([])

@@ -328,9 +303,7 @@ def get(self, request: Request, organization) -> Response:
queryset = add_environment_to_queryset(queryset, filter_params)
if query:
try:
queryset = _filter_releases_by_query(
queryset, organization, query, filter_params
)
queryset = _filter_releases_by_query(queryset, organization, query, filter_params)
except InvalidSearchQuery as e:
return Response(
{"detail": str(e)},
@@ -349,9 +322,7 @@ def get(self, request: Request, organization) -> Response:
queryset = queryset.order_by("-date")
paginator_kwargs["order_by"] = "-date"
elif sort == "build":
queryset = queryset.filter(build_number__isnull=False).order_by(
"-build_number"
)
queryset = queryset.filter(build_number__isnull=False).order_by("-build_number")
paginator_kwargs["order_by"] = "-build_number"
elif sort == "semver":
queryset = queryset.annotate_prerelease_column()
@@ -370,9 +341,7 @@ def get(self, request: Request, organization) -> Response:
elif sort in self.SESSION_SORTS:
if not flatten:
return Response(
{
"detail": "sorting by crash statistics requires flattening (flatten=1)"
},
{"detail": "sorting by crash statistics requires flattening (flatten=1)"},
status=400,
)

@@ -384,25 +353,19 @@ def qs_load_func(queryset, total_offset, qs_offset, limit):
: total_offset + limit
]
)
releases_with_session_data = (
release_health.backend.check_releases_have_health_data(
organization.id,
filter_params["project_id"],
release_versions,
(
filter_params["start"]
if filter_params["start"]
else datetime.utcnow() - timedelta(days=90)
),
filter_params["end"]
if filter_params["end"]
else datetime.utcnow(),
)
releases_with_session_data = release_health.backend.check_releases_have_health_data(
organization.id,
filter_params["project_id"],
release_versions,
(
filter_params["start"]
if filter_params["start"]
else datetime.utcnow() - timedelta(days=90)
),
filter_params["end"] if filter_params["end"] else datetime.utcnow(),
)
valid_versions = [
rv
for rv in release_versions
if rv not in releases_with_session_data
rv for rv in release_versions if rv not in releases_with_session_data
]

results = list(
@@ -415,8 +378,7 @@ def qs_load_func(queryset, total_offset, qs_offset, limit):

paginator_cls = MergingOffsetPaginator
paginator_kwargs.update(
data_load_func=lambda offset,
limit: release_health.backend.get_project_releases_by_stability(
data_load_func=lambda offset, limit: release_health.backend.get_project_releases_by_stability(
project_ids=filter_params["project_id"],
environments=filter_params.get("environment"),
scope=sort,
@@ -523,9 +485,7 @@ def post(self, request: Request, organization) -> Response:
projects = []
for id_or_slug in result["projects"]:
if id_or_slug not in allowed_projects:
return Response(
{"projects": ["Invalid project ids or slugs"]}, status=400
)
return Response({"projects": ["Invalid project ids or slugs"]}, status=400)
projects.append(allowed_projects[id_or_slug])

new_status = result.get("status")
@@ -603,11 +563,7 @@ def post(self, request: Request, organization) -> Response:
if not request.user.is_authenticated and not request.auth:
scope.set_tag("failure_reason", "user_not_authenticated")
return Response(
{
"refs": [
"You must use an authenticated API token to fetch refs"
]
},
{"refs": ["You must use an authenticated API token to fetch refs"]},
status=400,
)
fetch_commits = not commit_list
@@ -637,9 +593,7 @@ def post(self, request: Request, organization) -> Response:
)

if is_org_auth_token_auth(request.auth):
update_org_auth_token_last_used(
request.auth, [project.id for project in projects]
)
update_org_auth_token_last_used(request.auth, [project.id for project in projects])

scope.set_tag("success_status", status)
return Response(serialize(release, request.user), status=status)
@@ -648,9 +602,7 @@


@region_silo_endpoint
class OrganizationReleasesStatsEndpoint(
OrganizationReleasesBaseEndpoint, EnvironmentMixin
):
class OrganizationReleasesStatsEndpoint(OrganizationReleasesBaseEndpoint, EnvironmentMixin):
publish_status = {
"GET": ApiPublishStatus.UNKNOWN,
}
@@ -666,9 +618,7 @@ def get(self, request: Request, organization) -> Response:
query = request.GET.get("query")

try:
filter_params = self.get_filter_params(
request, organization, date_filter_optional=True
)
filter_params = self.get_filter_params(request, organization, date_filter_optional=True)
except NoProjects:
return Response([])

@@ -688,9 +638,7 @@ def get(self, request: Request, organization) -> Response:
queryset = add_environment_to_queryset(queryset, filter_params)
if query:
try:
queryset = _filter_releases_by_query(
queryset, organization, query, filter_params
)
queryset = _filter_releases_by_query(queryset, organization, query, filter_params)
except InvalidSearchQuery as e:
return Response(
{"detail": str(e)},
@@ -702,8 +650,7 @@ def get(self, request: Request, organization) -> Response:
queryset=queryset,
paginator_cls=OffsetPaginator,
on_results=lambda x: [
{"version": release["version"], "date": serialize(release["date"])}
for release in x
{"version": release["version"], "date": serialize(release["date"])} for release in x
],
default_per_page=1000,
max_per_page=1000,