Merge branch 'master' into priscila/fix/quick-start/remove-the-basic-steps
priscilawebdev authored Oct 20, 2024
2 parents bac0cf2 + e88eae1 commit c89eff9
Showing 131 changed files with 2,748 additions and 598 deletions.
40 changes: 22 additions & 18 deletions src/sentry/api/endpoints/event_ai_suggested_fix.py
@@ -322,26 +322,30 @@ def get(self, request: Request, project, event_id) -> HttpResponse | StreamingHt
if event is None:
raise ResourceDoesNotExist

# Check the OpenAI access policy
policy = get_openai_policy(
request.organization,
request.user,
pii_certified=request.GET.get("pii_certified") == "yes",
)
policy_failure = None
stream = request.GET.get("stream") == "yes"

if policy == "subprocessor":
policy_failure = "subprocessor"
elif policy == "individual_consent":
if request.GET.get("consent") != "yes":
policy_failure = "individual_consent"
elif policy == "pii_certification_required":
policy_failure = "pii_certification_required"
elif policy == "allowed":
pass
# If the option has specifically been set to False,
if not bool(request.organization.get_option("sentry:ai_suggested_solution", default=False)):
policy_failure = "organization_consent_required"
else:
logger.warning("Unknown OpenAI policy state")
# Check the OpenAI access policy
policy = get_openai_policy(
request.organization,
request.user,
pii_certified=request.GET.get("pii_certified") == "yes",
)
stream = request.GET.get("stream") == "yes"

if policy == "subprocessor":
policy_failure = "subprocessor"
elif policy == "individual_consent":
if request.GET.get("consent") != "yes":
policy_failure = "individual_consent"
elif policy == "pii_certification_required":
policy_failure = "pii_certification_required"
elif policy == "allowed":
pass
else:
logger.warning("Unknown OpenAI policy state")

if policy_failure is not None:
return HttpResponse(
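Reading the interleaved hunk above: the deleted lines ran the OpenAI policy check unconditionally, while the added lines first consult the sentry:ai_suggested_solution organization option and only then fall through to the policy check. A minimal self-contained paraphrase of the new flow (a sketch, not the committed code; get_openai_policy is passed in as a parameter purely so the snippet stands alone):

import logging

logger = logging.getLogger(__name__)


def resolve_policy_failure(request, organization, get_openai_policy):
    # Sketch of the reordered gating: the organization-level opt-in is the
    # first gate, and the OpenAI policy lookup is skipped when it is absent
    # or explicitly False.
    policy_failure = None
    if not bool(organization.get_option("sentry:ai_suggested_solution", default=False)):
        policy_failure = "organization_consent_required"
    else:
        policy = get_openai_policy(
            organization,
            request.user,
            pii_certified=request.GET.get("pii_certified") == "yes",
        )
        if policy == "subprocessor":
            policy_failure = "subprocessor"
        elif policy == "individual_consent":
            if request.GET.get("consent") != "yes":
                policy_failure = "individual_consent"
        elif policy == "pii_certification_required":
            policy_failure = "pii_certification_required"
        elif policy == "allowed":
            pass
        else:
            logger.warning("Unknown OpenAI policy state")
    return policy_failure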
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/group_ai_autofix.py
@@ -163,6 +163,7 @@ def post(self, request: Request, group: Group) -> Response:
if not (
features.has("projects:ai-autofix", group.project)
or features.has("organizations:autofix", group.organization)
or group.organization.get_option("sentry:gen_ai_consent", False)
):
return self._respond_with_error("AI Autofix is not enabled for this project.", 403)

6 changes: 0 additions & 6 deletions src/sentry/api/endpoints/group_autofix_setup_check.py
@@ -108,12 +108,6 @@ def get(self, request: Request, group: Group) -> Response:
"""
Checks if we are able to run Autofix on the given group.
"""
if not (
features.has("projects:ai-autofix", group.project)
or features.has("organizations:autofix", group.organization)
):
return Response({"detail": "Feature not enabled for project"}, status=403)

org: Organization = request.organization
has_gen_ai_consent = org.get_option("sentry:gen_ai_consent", False)

26 changes: 25 additions & 1 deletion src/sentry/api/endpoints/organization_dashboard_details.py
@@ -3,6 +3,7 @@
from django.db.models import F
from django.utils import timezone
from drf_spectacular.utils import extend_schema
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework.response import Response

@@ -30,9 +31,32 @@
READ_FEATURE = "organizations:dashboards-basic"


class DashboardPermissions(BasePermission):
"""
Django Permissions Class for managing Dashboard Edit
permissions defined in the DashboardPermissions Model
"""

scope_map = {
"GET": ["org:read", "org:write", "org:admin"],
"POST": ["org:read", "org:write", "org:admin"],
"PUT": ["org:read", "org:write", "org:admin"],
"DELETE": ["org:read", "org:write", "org:admin"],
}

def has_object_permission(self, request: Request, view, obj):
if isinstance(obj, Dashboard) and features.has(
"organizations:dashboards-edit-access", obj.organization, actor=request.user
):
# Check if user has permissions to edit dashboard
if hasattr(obj, "permissions"):
return obj.permissions.has_edit_permissions(request.user.id)
return True


class OrganizationDashboardBase(OrganizationEndpoint):
owner = ApiOwner.PERFORMANCE
permission_classes = (OrganizationDashboardsPermission,)
permission_classes = (OrganizationDashboardsPermission, DashboardPermissions)

def convert_args(
self, request: Request, organization_id_or_slug, dashboard_id, *args, **kwargs
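For context on the DashboardPermissions class added above: DRF evaluates every class listed in permission_classes and denies the request if any of them denies it, with has_object_permission consulted for object-level checks. A framework-free sketch of that AND semantics (FakeRequest, the two stand-in classes, and check_object_permissions below are illustrative, not code from this commit):

from dataclasses import dataclass, field


@dataclass
class FakeRequest:
    user_id: int
    scopes: set[str] = field(default_factory=set)


class OrgScopePermission:
    # Stand-in for a scope-based check such as OrganizationDashboardsPermission.
    def has_object_permission(self, request, view, obj) -> bool:
        return "org:read" in request.scopes


class DashboardEditPermission:
    # Shaped like DashboardPermissions above: allow when the dashboard has no
    # explicit permissions row, otherwise defer to its edit-permission check.
    def has_object_permission(self, request, view, obj) -> bool:
        permissions = getattr(obj, "permissions", None)
        if permissions is None:
            return True
        return permissions.has_edit_permissions(request.user_id)


def check_object_permissions(request, view, obj, permission_classes) -> bool:
    # DRF walks the configured classes and requires all of them to allow.
    return all(p().has_object_permission(request, view, obj) for p in permission_classes)


dashboard_without_acl = type("Dashboard", (), {})()  # no .permissions attribute
request = FakeRequest(user_id=7, scopes={"org:read"})
assert check_object_permissions(
    request, None, dashboard_without_acl, [OrgScopePermission, DashboardEditPermission]
)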
@@ -99,7 +99,7 @@ def get_time_params(start: datetime, end: datetime) -> MappedParams:
@region_silo_endpoint
class OrganizationTransactionAnomalyDetectionEndpoint(OrganizationEventsEndpointBase):
publish_status = {
"GET": ApiPublishStatus.UNKNOWN,
"GET": ApiPublishStatus.PRIVATE,
}

def has_feature(self, organization, request):
@@ -58,9 +58,9 @@ def validate(self, data):
@region_silo_endpoint
class ProjectTransactionThresholdOverrideEndpoint(OrganizationEventsV2EndpointBase):
publish_status = {
"DELETE": ApiPublishStatus.UNKNOWN,
"GET": ApiPublishStatus.UNKNOWN,
"POST": ApiPublishStatus.UNKNOWN,
"DELETE": ApiPublishStatus.PRIVATE,
"GET": ApiPublishStatus.PRIVATE,
"POST": ApiPublishStatus.PRIVATE,
}
permission_classes = (ProjectTransactionThresholdOverridePermission,)

@@ -18,7 +18,7 @@
"/api/0/{var}/{issue_id}/events/{event_id}/": {"GET"},
"/api/0/{var}/{issue_id}/{var}/": {"GET", "POST"},
"/api/0/{var}/{issue_id}/{var}/{note_id}/": {"DELETE", "PUT"},
"/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE"},
"/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE", "PUT"},
"/api/0/{var}/{issue_id}/reprocessing/": {"POST"},
"/api/0/{var}/{issue_id}/stats/": {"GET"},
"/api/0/{var}/{issue_id}/tags/": {"GET"},
@@ -82,6 +82,7 @@
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/hashes/": {
"GET",
"DELETE",
"PUT",
},
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/reprocessing/": {"POST"},
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/stats/": {"GET"},
32 changes: 22 additions & 10 deletions src/sentry/data_secrecy/api/waive_data_secrecy.py
@@ -1,3 +1,4 @@
import logging
from collections.abc import Mapping
from typing import Any

@@ -20,6 +21,8 @@
from sentry.data_secrecy.models import DataSecrecyWaiver
from sentry.models.organization import Organization

logger = logging.getLogger("sentry.data_secrecy")


class WaiveDataSecrecyPermission(OrganizationPermission):
scope_map = {
@@ -124,20 +127,29 @@ def delete(self, request: Request, organization: Organization):
Reinstates data secrecy for an organization.
"""
try:
logger.info("Reinstating data secrecy for organization %s", organization.id)
ds = DataSecrecyWaiver.objects.get(organization=organization)
ds.delete()

self.create_audit_entry(
request=request,
organization=organization,
event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
)
return Response(
{"detail": "Data secrecy has been reinstated."},
status=status.HTTP_204_NO_CONTENT,
logger.info(
"Data secrecy waiver found for organization %s",
organization.id,
extra={"ds": ds.id},
)
except DataSecrecyWaiver.DoesNotExist:
logger.info("No data secrecy waiver found for organization %s", organization.id)
return Response(
{"detail": "No data secrecy waiver found for this organization."},
status=status.HTTP_404_NOT_FOUND,
)

ds.delete()
logger.info("Data secrecy waiver deleted for organization %s", organization.id)

self.create_audit_entry(
request=request,
organization=organization,
event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
)
return Response(
{"detail": "Data secrecy has been reinstated."},
status=status.HTTP_204_NO_CONTENT,
)
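The reshuffle in this hunk narrows the try block: only the DataSecrecyWaiver lookup stays guarded, while the delete, the audit entry, and the success response now run afterwards, keeping the not-found path clearly separated from the success path. A generic, self-contained sketch of that pattern (hypothetical names, not the committed code):

class WaiverNotFound(Exception):
    pass


def reinstate(waivers: dict[int, str], org_id: int) -> str:
    try:
        waiver = waivers[org_id]  # only the lookup can raise here
    except KeyError:
        raise WaiverNotFound(f"no waiver for org {org_id}") from None

    # Deletion, auditing, and the success response happen after the except
    # clause, so the not-found branch stays isolated from the follow-up work.
    del waivers[org_id]
    return f"deleted waiver {waiver}"


store = {1: "w_123"}
print(reinstate(store, 1))  # -> deleted waiver w_123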
11 changes: 11 additions & 0 deletions src/sentry/eventstore/base.py
@@ -130,6 +130,7 @@ class EventStorage(Service):
"get_events_snql",
"get_unfetched_events",
"get_adjacent_event_ids",
"get_adjacent_event_ids_snql",
"bind_nodes",
"get_unfetched_transactions",
)
@@ -272,6 +273,16 @@ def get_event_by_id(
"""
raise NotImplementedError

def get_adjacent_event_ids_snql(
self,
organization_id: int,
project_id: int,
group_id: int,
environments: list[str],
event: Event | GroupEvent,
):
raise NotImplementedError

def get_adjacent_event_ids(self, event, filter):
"""
Gets the previous and next event IDs given a current event and some conditions/filters.
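A hypothetical call site for the new interface method, using the keyword names from the signature above; the eventstore.backend access path and the pre-loaded event/group objects are assumptions about the surrounding wiring, not something this hunk shows:

from sentry import eventstore  # assumption: the service wrapper exposes the backend here

prev_ref, next_ref = eventstore.backend.get_adjacent_event_ids_snql(
    organization_id=group.organization.id,  # `group` and `event` loaded elsewhere (hypothetical)
    project_id=event.project_id,
    group_id=group.id,
    environments=[],
    event=event,
)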
110 changes: 109 additions & 1 deletion src/sentry/eventstore/snuba/backend.py
@@ -29,7 +29,7 @@
from sentry.snuba.dataset import Dataset
from sentry.snuba.events import Columns
from sentry.utils import snuba
from sentry.utils.snuba import DATASETS, _prepare_start_end, raw_snql_query
from sentry.utils.snuba import DATASETS, _prepare_start_end, bulk_snuba_queries, raw_snql_query
from sentry.utils.validators import normalize_event_id

EVENT_ID = Columns.EVENT_ID.value.alias
@@ -451,6 +451,114 @@ def _get_dataset_for_event(self, event):
else:
return Dataset.Discover

def get_adjacent_event_ids_snql(
self, organization_id, project_id, group_id, environments, event
):
"""
Utility function for grabbing an event's adjacent events,
which are the ones with the closest timestamps before and after.
This function is only used in project_event_details at the moment,
so its interface is tailored to that. We use SnQL, with the project_id
and toStartOfDay(timestamp), to efficiently scan our table.
"""
dataset = self._get_dataset_for_event(event)
app_id = "eventstore"
referrer = "eventstore.get_next_or_prev_event_id_snql"
tenant_ids = {"organization_id": organization_id}
environment_conditions = []
if environments:
environment_conditions.append(Condition(Column("environment"), Op.IN, environments))

def make_constant_conditions():
environment_conditions = []
if environments:
environment_conditions.append(Condition(Column("environment"), Op.IN, environments))

group_conditions = []
if group_id:
group_conditions.append(Condition(Column("group_id"), Op.EQ, group_id))
project_conditions = [Condition(Column("project_id"), Op.EQ, project_id)]
return [
*environment_conditions,
*group_conditions,
*project_conditions,
]

def make_prev_timestamp_conditions(event):
return [
Condition(
Column(DATASETS[dataset][Columns.TIMESTAMP.value.alias]),
Op.GTE,
event.datetime - timedelta(days=100),
),
Condition(
Column(DATASETS[dataset][Columns.TIMESTAMP.value.alias]),
Op.LT,
event.datetime + timedelta(seconds=1),
),
Condition(Column("event_id"), Op.LT, event.event_id),
]

def make_next_timestamp_conditions(event):
return [
Condition(
Column(DATASETS[dataset][Columns.TIMESTAMP.value.alias]),
Op.LT,
event.datetime + timedelta(days=100),
),
Condition(
Column(DATASETS[dataset][Columns.TIMESTAMP.value.alias]), Op.GTE, event.datetime
),
Condition(Column("event_id"), Op.GT, event.event_id),
]

def make_request(is_prev):
order_by_direction = Direction.DESC if is_prev else Direction.ASC
conditions = make_constant_conditions()
conditions.extend(
make_prev_timestamp_conditions(event)
if is_prev
else make_next_timestamp_conditions(event)
)
return Request(
dataset=dataset.value,
app_id=app_id,
query=Query(
match=Entity(dataset.value),
select=[Column("event_id"), Column("project_id")],
where=conditions,
orderby=[
OrderBy(
Column("project_id"),
direction=order_by_direction,
),
OrderBy(
Function("toStartOfDay", [Column("timestamp")]),
direction=order_by_direction,
),
OrderBy(
Column("timestamp"),
direction=order_by_direction,
),
OrderBy(
Column("event_id"),
direction=order_by_direction,
),
],
limit=Limit(1),
),
tenant_ids=tenant_ids,
)

snql_request_prev = make_request(is_prev=True)
snql_request_next = make_request(is_prev=False)

bulk_snql_results = bulk_snuba_queries(
[snql_request_prev, snql_request_next], referrer=referrer
)
event_ids = [self.__get_event_id_from_result(result) for result in bulk_snql_results]
return event_ids

def get_adjacent_event_ids(self, event, filter):
"""
Returns (project_id, event_id) of a previous event given a current event
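The docstring above states the intent: the previous event is the closest earlier one and the next event the closest later one, with event_id breaking ties and the scan bounded to roughly ±100 days around the pivot. As a plain-Python illustration of those adjacency semantics only (not the SnQL path, and ignoring the date bounds and the project_id/toStartOfDay ordering used to exploit the table layout):

from datetime import datetime, timezone

# Three events for one group, oldest first; the middle one is the pivot.
events = [
    ("a" * 32, datetime(2024, 10, 20, 12, 0, tzinfo=timezone.utc)),
    ("b" * 32, datetime(2024, 10, 20, 12, 5, tzinfo=timezone.utc)),
    ("c" * 32, datetime(2024, 10, 20, 12, 9, tzinfo=timezone.utc)),
]
pivot_id, pivot_ts = events[1]

# prev: not after the pivot and with a smaller event_id; next: the mirror image.
prev_candidates = [(ts, eid) for eid, ts in events if ts <= pivot_ts and eid < pivot_id]
next_candidates = [(ts, eid) for eid, ts in events if ts >= pivot_ts and eid > pivot_id]

# ORDER BY timestamp DESC/ASC, event_id DESC/ASC LIMIT 1, in miniature.
prev_event = max(prev_candidates, default=None)
next_event = min(next_candidates, default=None)
print(prev_event, next_event)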
4 changes: 0 additions & 4 deletions src/sentry/features/temporary.py
@@ -497,14 +497,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:transaction-name-normalize", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=False)
# Sanitize transaction names in the ingestion pipeline. # Deprecated
manager.add("organizations:transaction-name-sanitization", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enables creation and full updating of uptime monitors via the api
manager.add("organizations:uptime-api-create-update", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables automatic hostname detection in uptime
manager.add("organizations:uptime-automatic-hostname-detection", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enables automatic subscription creation in uptime
manager.add("organizations:uptime-automatic-subscription-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enabled returning uptime monitors from the rule api
manager.add("organizations:uptime-rule-api", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable creating issues via the issue platform
manager.add("organizations:uptime-create-issues", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enables uptime related settings for projects and orgs