From 337054da04dfad9a7efd861d34a022f7cefdd124 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Wed, 31 Jul 2024 09:27:04 -0700 Subject: [PATCH 01/52] fix(prism): Remove unnecessary vertical-align: middle style (#75313) The style adds unnecessary whitespace between lines and causes line highlighting to be misaligned. --- static/app/styles/global.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/static/app/styles/global.tsx b/static/app/styles/global.tsx index dc5f0fc4fc5f6..e0b65f130ec1b 100644 --- a/static/app/styles/global.tsx +++ b/static/app/styles/global.tsx @@ -22,7 +22,6 @@ const prismStyles = (theme: Theme) => css` code { background: unset; - vertical-align: middle; } } From 93fdcc9785401adabd123a55e34965dc30d44a90 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 31 Jul 2024 09:47:19 -0700 Subject: [PATCH 02/52] chore(utils): Fix new circuit breaker docstring re: instantiation (#75197) This updates the docstring of the new circuit breaker to reflect the fact that it must be reinstantiated at runtime. 
--- src/sentry/utils/circuit_breaker2.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/sentry/utils/circuit_breaker2.py b/src/sentry/utils/circuit_breaker2.py index da144a77097ec..3988fd917c9fa 100644 --- a/src/sentry/utils/circuit_breaker2.py +++ b/src/sentry/utils/circuit_breaker2.py @@ -81,10 +81,13 @@ class CircuitBreaker: Usage: - # See `CircuitBreakerConfig` class for config options - breaker = CircuitBreaker("squirrel_chasing", config) - def get_top_dogs(payload): + # See `CircuitBreakerConfig` class for config options + breaker = CircuitBreaker( + settings.SQUIRREL_CHASING_CIRCUIT_BREAKER_KEY, + options.get("squirrel_chasing.circuit_breaker_config"), + ) + # Check the state of the breaker before calling the service try: if breaker.should_allow_request(): @@ -111,9 +114,9 @@ def get_top_dogs(payload): return format_hof_entries(response) The `breaker.should_allow_request()` check can alternatively be used outside of `get_top_dogs`, - to prevent calls to it. In that case, the original `breaker` object can be imported alongside - `get_top_dogs` or reinstantiated with the same config - it has no state of its own, instead - relying on redis-backed rate limiters and redis itself to track error count and breaker status. + to prevent calls to it. In that case, the circuit breaker must be reinstantiated with the same + config. This works because the breaker has no state of its own, instead relying on redis-backed + rate limiters and redis itself to track error count and breaker status. 
""" def __init__(self, key: str, config: CircuitBreakerConfig): From 15a9d548c6795189982d2b6de54322223a7d5349 Mon Sep 17 00:00:00 2001 From: Dora Date: Wed, 31 Jul 2024 09:49:26 -0700 Subject: [PATCH 03/52] fix(trace view): replace with keydown for shortcuts (#75346) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **Before:** ![Screenshot 2024-07-31 at 9 20 21 AM](https://github.com/user-attachments/assets/0ecce73e-2036-4877-82b1-2d57a3e151f0) **After:** ![Screenshot 2024-07-31 at 9 16 21 AM](https://github.com/user-attachments/assets/6567eef4-f781-4d41-b68c-770a5d696820) --- .../views/performance/newTraceDetails/traceShortcutsModal.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/views/performance/newTraceDetails/traceShortcutsModal.tsx b/static/app/views/performance/newTraceDetails/traceShortcutsModal.tsx index a698405fec047..7b42a09ddfccc 100644 --- a/static/app/views/performance/newTraceDetails/traceShortcutsModal.tsx +++ b/static/app/views/performance/newTraceDetails/traceShortcutsModal.tsx @@ -5,6 +5,7 @@ import tracingKeyboardShortcuts from 'sentry-images/spot/tracing-keyboard-shortc import {type ModalRenderProps, openModal} from 'sentry/actionCreators/modal'; import {Button} from 'sentry/components/button'; +import {IconKeyDown} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import useOrganization from 'sentry/utils/useOrganization'; @@ -19,7 +20,7 @@ export function TraceShortcuts() { return ( ); } From dd6126689397e3b88bdea97d2310b127978be6bc Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Wed, 31 Jul 2024 09:50:56 -0700 Subject: [PATCH 04/52] feat(issue-details): Update group header (#74672) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this pr introduces changes to the group header on issue details. 
In addition to styling changes, it also combines the first/last seen, participant, assigned, and viewer sections from the sidebar into the header. for now, it hides the elements that it will replace. to limit the scope of this PR, additional functionality changes will be made in future PRs (expanding partipants/viewers, fine tuning first/last seen for example) Screenshot 2024-07-24 at 2 08 53 PM --- static/app/components/group/assignedTo.tsx | 42 +-- .../app/components/group/assigneeSelector.tsx | 117 +++++++++ static/app/components/group/releaseStats.tsx | 112 ++++---- static/app/components/stream/group.tsx | 45 +--- .../app/views/issueDetails/actions/index.tsx | 241 +++++++++++------ .../app/views/issueDetails/groupDetails.tsx | 40 ++- .../groupEventDetails/groupEventDetails.tsx | 2 +- .../app/views/issueDetails/groupSidebar.tsx | 15 +- static/app/views/issueDetails/header.tsx | 75 ++---- .../views/issueDetails/streamlinedHeader.tsx | 246 ++++++++++++++++++ .../views/issueDetails/updatedHeader.spec.tsx | 101 +++++++ .../issueDetails/useIssueDetailsHeader.tsx | 90 +++++++ 12 files changed, 845 insertions(+), 281 deletions(-) create mode 100644 static/app/components/group/assigneeSelector.tsx create mode 100644 static/app/views/issueDetails/streamlinedHeader.tsx create mode 100644 static/app/views/issueDetails/updatedHeader.spec.tsx create mode 100644 static/app/views/issueDetails/useIssueDetailsHeader.tsx diff --git a/static/app/components/group/assignedTo.tsx b/static/app/components/group/assignedTo.tsx index 5cea17eeabe2e..ff89ce648eb2b 100644 --- a/static/app/components/group/assignedTo.tsx +++ b/static/app/components/group/assignedTo.tsx @@ -1,14 +1,10 @@ import {useEffect, useState} from 'react'; import styled from '@emotion/styled'; -import {assignToActor, clearAssignment} from 'sentry/actionCreators/group'; -import {addErrorMessage} from 'sentry/actionCreators/indicator'; import {fetchOrgMembers} from 'sentry/actionCreators/members'; import 
{openIssueOwnershipRuleModal} from 'sentry/actionCreators/modal'; import Access from 'sentry/components/acl/access'; -import AssigneeSelectorDropdown, { - type AssignableEntity, -} from 'sentry/components/assigneeSelectorDropdown'; +import AssigneeSelectorDropdown from 'sentry/components/assigneeSelectorDropdown'; import GuideAnchor from 'sentry/components/assistant/guideAnchor'; import ActorAvatar from 'sentry/components/avatar/actorAvatar'; import {Button} from 'sentry/components/button'; @@ -17,6 +13,7 @@ import type { OnAssignCallback, SuggestedAssignee, } from 'sentry/components/deprecatedAssigneeSelectorDropdown'; +import {useHandleAssigneeChange} from 'sentry/components/group/assigneeSelector'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import * as SidebarSection from 'sentry/components/sidebarSection'; import {IconSettings, IconUser} from 'sentry/icons'; @@ -28,8 +25,6 @@ import type {Actor, Commit, Committer, Group, Project} from 'sentry/types'; import type {Event} from 'sentry/types/event'; import {defined} from 'sentry/utils'; import type {FeedbackIssue} from 'sentry/utils/feedback/types'; -import {useMutation} from 'sentry/utils/queryClient'; -import type RequestError from 'sentry/utils/requestError/requestError'; import {toTitleCase} from 'sentry/utils/string/toTitleCase'; import useApi from 'sentry/utils/useApi'; import useCommitters from 'sentry/utils/useCommitters'; @@ -191,35 +186,10 @@ function AssignedTo({ } ); - const {mutate: handleAssigneeChange, isLoading: assigneeLoading} = useMutation< - AssignableEntity | null, - RequestError, - AssignableEntity | null - >({ - mutationFn: async ( - newAssignee: AssignableEntity | null - ): Promise => { - if (newAssignee) { - await assignToActor({ - id: group.id, - orgSlug: organization.slug, - actor: {id: newAssignee.id, type: newAssignee.type}, - assignedBy: 'assignee_selector', - }); - return Promise.resolve(newAssignee); - } - - await clearAssignment(group.id, organization.slug, 
'assignee_selector'); - return Promise.resolve(null); - }, - onSuccess: (newAssignee: AssignableEntity | null) => { - if (onAssign && newAssignee) { - onAssign(newAssignee.type, newAssignee.assignee, newAssignee.suggestedAssignee); - } - }, - onError: () => { - addErrorMessage('Failed to update assignee'); - }, + const {handleAssigneeChange, assigneeLoading} = useHandleAssigneeChange({ + organization, + group, + onAssign, }); useEffect(() => { diff --git a/static/app/components/group/assigneeSelector.tsx b/static/app/components/group/assigneeSelector.tsx new file mode 100644 index 0000000000000..fe02a4b1bc3ea --- /dev/null +++ b/static/app/components/group/assigneeSelector.tsx @@ -0,0 +1,117 @@ +import styled from '@emotion/styled'; + +import {assignToActor, clearAssignment} from 'sentry/actionCreators/group'; +import {addErrorMessage} from 'sentry/actionCreators/indicator'; +import {AssigneeBadge} from 'sentry/components/assigneeBadge'; +import AssigneeSelectorDropdown, { + type AssignableEntity, +} from 'sentry/components/assigneeSelectorDropdown'; +import {Button} from 'sentry/components/button'; +import type {OnAssignCallback} from 'sentry/components/deprecatedAssigneeSelectorDropdown'; +import {t} from 'sentry/locale'; +import type {Group} from 'sentry/types/group'; +import type {Organization} from 'sentry/types/organization'; +import type {User} from 'sentry/types/user'; +import {useMutation} from 'sentry/utils/queryClient'; +import type RequestError from 'sentry/utils/requestError/requestError'; + +interface AssigneeSelectorProps { + assigneeLoading: boolean; + group: Group; + handleAssigneeChange: (assignedActor: AssignableEntity | null) => void; + memberList?: User[]; +} + +export function useHandleAssigneeChange({ + organization, + group, + onAssign, +}: { + group: Group; + organization: Organization; + onAssign?: OnAssignCallback; +}) { + const {mutate: handleAssigneeChange, isLoading: assigneeLoading} = useMutation< + AssignableEntity | null, + 
RequestError, + AssignableEntity | null + >({ + mutationFn: async ( + newAssignee: AssignableEntity | null + ): Promise => { + if (newAssignee) { + await assignToActor({ + id: group.id, + orgSlug: organization.slug, + actor: {id: newAssignee.id, type: newAssignee.type}, + assignedBy: 'assignee_selector', + }); + return Promise.resolve(newAssignee); + } + + await clearAssignment(group.id, organization.slug, 'assignee_selector'); + return Promise.resolve(null); + }, + onSuccess: (newAssignee: AssignableEntity | null) => { + if (onAssign && newAssignee) { + onAssign(newAssignee.type, newAssignee.assignee, newAssignee.suggestedAssignee); + } + }, + onError: () => { + addErrorMessage('Failed to update assignee'); + }, + }); + + return {handleAssigneeChange, assigneeLoading}; +} + +/** + * Assignee selector used on issue details + issue stream. Uses `AssigneeSelectorDropdown` which controls most of the logic while this is primarily responsible for the design. + */ +export function AssigneeSelector({ + group, + memberList, + assigneeLoading, + handleAssigneeChange, +}: AssigneeSelectorProps) { + return ( + + handleAssigneeChange(assignedActor) + } + onClear={() => handleAssigneeChange(null)} + trigger={(props, isOpen) => ( + + { + const [_ownershipType, ownerId] = owner.owner.split(':'); + return ownerId === group.assignedTo?.id; + })?.type + } + loading={assigneeLoading} + chevronDirection={isOpen ? 
'up' : 'down'} + /> + + )} + /> + ); +} + +const StyledDropdownButton = styled(Button)` + font-weight: ${p => p.theme.fontWeightNormal}; + border: none; + padding: 0; + height: unset; + border-radius: 10px; + box-shadow: none; +`; diff --git a/static/app/components/group/releaseStats.tsx b/static/app/components/group/releaseStats.tsx index 50ab1e92742cd..2198991628937 100644 --- a/static/app/components/group/releaseStats.tsx +++ b/static/app/components/group/releaseStats.tsx @@ -13,6 +13,7 @@ import type {CurrentRelease, Group, Organization, Project, Release} from 'sentry import {defined} from 'sentry/utils'; import getDynamicText from 'sentry/utils/getDynamicText'; import {useApiQuery} from 'sentry/utils/queryClient'; +import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; import QuestionTooltip from '../questionTooltip'; @@ -60,6 +61,8 @@ function GroupReleaseStats({ } ); + const hasStreamlinedUI = useHasStreamlinedUI(); + const firstRelease = groupReleaseData?.firstRelease; const lastRelease = groupReleaseData?.lastRelease; @@ -103,59 +106,62 @@ function GroupReleaseStats({ lastSeen={group.lastSeen} /> - - - - - {t('Last Seen')} - - - - - - - - - - {t('First Seen')} - - - - - - + {!hasStreamlinedUI && ( +
+ + + + {t('Last Seen')} + + + + + + + + + + {t('First Seen')} + + + + + + +
+ )} {!hasRelease ? ( {t('Releases')} diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx index 0f873d2fe4bea..0dcb160e45b8b 100644 --- a/static/app/components/stream/group.tsx +++ b/static/app/components/stream/group.tsx @@ -6,17 +6,14 @@ import type {LocationDescriptor} from 'history'; import {assignToActor, clearAssignment} from 'sentry/actionCreators/group'; import {addErrorMessage} from 'sentry/actionCreators/indicator'; -import {AssigneeBadge} from 'sentry/components/assigneeBadge'; -import AssigneeSelectorDropdown, { - type AssignableEntity, -} from 'sentry/components/assigneeSelectorDropdown'; +import type {AssignableEntity} from 'sentry/components/assigneeSelectorDropdown'; import GuideAnchor from 'sentry/components/assistant/guideAnchor'; -import {Button} from 'sentry/components/button'; import GroupStatusChart from 'sentry/components/charts/groupStatusChart'; import Checkbox from 'sentry/components/checkbox'; import Count from 'sentry/components/count'; import EventOrGroupExtraDetails from 'sentry/components/eventOrGroupExtraDetails'; import EventOrGroupHeader from 'sentry/components/eventOrGroupHeader'; +import {AssigneeSelector} from 'sentry/components/group/assigneeSelector'; import {getBadgeProperties} from 'sentry/components/group/inboxBadges/statusBadge'; import type {GroupListColumn} from 'sentry/components/issues/groupList'; import Link from 'sentry/components/links/link'; @@ -548,34 +545,11 @@ function BaseGroupRow({ ) : null} {withColumns.includes('assignee') && ( - - handleAssigneeChange(assignedActor) - } - onClear={() => handleAssigneeChange(null)} - trigger={(props, isOpen) => ( - - { - const [_ownershipType, ownerId] = owner.owner.split(':'); - return ownerId === group.assignedTo?.id; - })?.type - } - loading={assigneeLoading} - chevronDirection={isOpen ? 
'up' : 'down'} - /> - - )} /> )} @@ -590,15 +564,6 @@ const StreamGroup = withOrganization(BaseGroupRow); export default StreamGroup; -const StyledDropdownButton = styled(Button)` - font-weight: ${p => p.theme.fontWeightNormal}; - border: none; - padding: 0; - height: unset; - border-radius: 10px; - box-shadow: none; -`; - // Position for wrapper is relative for overlay actions const Wrapper = styled(PanelItem)<{ reviewed: boolean; diff --git a/static/app/views/issueDetails/actions/index.tsx b/static/app/views/issueDetails/actions/index.tsx index 1e2c4bcc35e2c..b4feb993cd57f 100644 --- a/static/app/views/issueDetails/actions/index.tsx +++ b/static/app/views/issueDetails/actions/index.tsx @@ -16,8 +16,14 @@ import ResolveActions from 'sentry/components/actions/resolve'; import GuideAnchor from 'sentry/components/assistant/guideAnchor'; import {Button} from 'sentry/components/button'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import Divider from 'sentry/components/events/interfaces/debugMeta/debugImageDetails/candidate/information/divider'; import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; -import {IconEllipsis, IconSubscribed, IconUnsubscribed} from 'sentry/icons'; +import { + IconCheckmark, + IconEllipsis, + IconSubscribed, + IconUnsubscribed, +} from 'sentry/icons'; import {t} from 'sentry/locale'; import GroupStore from 'sentry/stores/groupStore'; import IssueListCacheStore from 'sentry/stores/IssueListCacheStore'; @@ -47,6 +53,7 @@ import withApi from 'sentry/utils/withApi'; import withOrganization from 'sentry/utils/withOrganization'; import {hasDatasetSelector} from 'sentry/views/dashboards/utils'; import {NewIssueExperienceButton} from 'sentry/views/issueDetails/actions/newIssueExperienceButton'; +import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; import ShareIssueModal from './shareModal'; import SubscribeAction from './subscribeAction'; @@ -87,6 +94,8 @@ export function 
Actions(props: Props) { const config = useMemo(() => getConfigForIssueType(group, project), [group, project]); + const hasStreamlinedUI = useHasStreamlinedUI(); + const { actions: { archiveUntilOccurrence: archiveUntilOccurrenceCap, @@ -361,6 +370,67 @@ export function Actions(props: Props) { }); return ( + {hasStreamlinedUI && + (isResolved || isIgnored ? ( + + + + {isResolved ? t('Resolved') : t('Archived')} + + + + onUpdate({ + status: GroupStatus.UNRESOLVED, + + statusDetails: {}, + + substatus: GroupSubstatus.ONGOING, + }) + } + > + {isResolved ? t('Unresolve') : t('Unarchive')} + + + ) : ( + + + + + + + : } + size="sm" + /> + + ))} - {organization.features.includes('issue-details-new-experience-toggle') ? ( - - ) : null} - : } - size="sm" - /> -
- -
- {discoverCap.enabled && ( - - trackIssueAction('open_in_discover')} - size="sm" - > - {t('Open in Discover')} - - - )} - {isResolved || isIgnored ? ( - - onUpdate({ - status: GroupStatus.UNRESOLVED, - statusDetails: {}, - substatus: GroupSubstatus.ONGOING, - }) - } - > - {isIgnored ? t('Archived') : t('Resolved')} - - ) : ( + {!hasStreamlinedUI && ( - + ) : null} + : } + size="sm" /> - - + + + {discoverCap.enabled && ( + + trackIssueAction('open_in_discover')} + size="sm" + > + + {t('Open in Discover')} + + + + )} + {isResolved || isIgnored ? ( + - + title={ + isAutoResolved + ? t( + 'This event is resolved due to the Auto Resolve configuration for this project' + ) + : t('Change status to unresolved') + } + size="sm" + disabled={disabled || isAutoResolved} + onClick={() => + onUpdate({ + status: GroupStatus.UNRESOLVED, + statusDetails: {}, + substatus: GroupSubstatus.ONGOING, + }) + } + > + {isIgnored ? t('Archived') : t('Resolved')} + + ) : ( + + + + + + + )} )}
@@ -531,4 +607,19 @@ const ActionWrapper = styled('div')` gap: ${space(0.5)}; `; +const ResolvedWrapper = styled('div')` + display: flex; + gap: ${space(0.5)}; + align-items: center; + color: ${p => p.theme.green400}; + font-weight: bold; + font-size: ${p => p.theme.fontSizeLarge}; +`; + +const ResolvedActionWapper = styled('div')` + display: flex; + gap: ${space(1)}; + align-items: center; +`; + export default withApi(withOrganization(Actions)); diff --git a/static/app/views/issueDetails/groupDetails.tsx b/static/app/views/issueDetails/groupDetails.tsx index 7c46e537f6660..f2b67316b6d7e 100644 --- a/static/app/views/issueDetails/groupDetails.tsx +++ b/static/app/views/issueDetails/groupDetails.tsx @@ -53,11 +53,11 @@ import {useParams} from 'sentry/utils/useParams'; import useProjects from 'sentry/utils/useProjects'; import useRouter from 'sentry/utils/useRouter'; import {useUser} from 'sentry/utils/useUser'; - -import {ERROR_TYPES} from './constants'; -import GroupHeader from './header'; -import SampleEventAlert from './sampleEventAlert'; -import {Tab, TabPaths} from './types'; +import GroupHeader from 'sentry/views/issueDetails//header'; +import {ERROR_TYPES} from 'sentry/views/issueDetails/constants'; +import SampleEventAlert from 'sentry/views/issueDetails/sampleEventAlert'; +import StreamlinedGroupHeader from 'sentry/views/issueDetails/streamlinedHeader'; +import {Tab, TabPaths} from 'sentry/views/issueDetails/types'; import { getGroupDetailsQueryData, getGroupEventDetailsQueryData, @@ -67,7 +67,8 @@ import { useDefaultIssueEvent, useEnvironmentsFromUrl, useFetchIssueTagsForDetailsPage, -} from './utils'; + useHasStreamlinedUI, +} from 'sentry/views/issueDetails/utils'; type Error = (typeof ERROR_TYPES)[keyof typeof ERROR_TYPES] | null; @@ -683,6 +684,8 @@ function GroupDetailsContent({ const environments = useEnvironmentsFromUrl(); + const hasStreamlinedUI = useHasStreamlinedUI(); + useTrackView({group, event, project, tab: currentTab}); const childProps = { 
@@ -702,14 +705,23 @@ function GroupDetailsContent({ value={currentTab} onChange={tab => trackTabChanged({tab, group, project, event, organization})} > - + {hasStreamlinedUI ? ( + + ) : ( + + )} {isValidElement(children) ? cloneElement(children, childProps) : children} diff --git a/static/app/views/issueDetails/groupEventDetails/groupEventDetails.tsx b/static/app/views/issueDetails/groupEventDetails/groupEventDetails.tsx index 6b0a5ea980b0f..2bcc697e2326e 100644 --- a/static/app/views/issueDetails/groupEventDetails/groupEventDetails.tsx +++ b/static/app/views/issueDetails/groupEventDetails/groupEventDetails.tsx @@ -186,7 +186,7 @@ function GroupEventDetails(props: GroupEventDetailsProps) { ) : ( - {renderGroupStatusBanner()} + {!hasStreamlinedUI && renderGroupStatusBanner()} {eventWithMeta && issueTypeConfig.stats.enabled && ( { @@ -257,7 +262,9 @@ export default function GroupSidebar({ return ( - + {!hasStreamlinedUI && ( + + )} {issueTypeConfig.stats.enabled && ( )} - {renderParticipantData()} - {renderSeenByList()} + {!hasStreamlinedUI && renderParticipantData()} + {!hasStreamlinedUI && renderSeenByList()} ); } diff --git a/static/app/views/issueDetails/header.tsx b/static/app/views/issueDetails/header.tsx index 8f8852c671c8e..bc647a5fcb8fc 100644 --- a/static/app/views/issueDetails/header.tsx +++ b/static/app/views/issueDetails/header.tsx @@ -1,4 +1,4 @@ -import {Fragment, useEffect, useMemo} from 'react'; +import {Fragment, useEffect} from 'react'; import styled from '@emotion/styled'; import type {LocationDescriptor} from 'history'; import omit from 'lodash/omit'; @@ -22,7 +22,6 @@ import {space} from 'sentry/styles/space'; import type {Event, Group, Organization, Project} from 'sentry/types'; import {IssueCategory, IssueType} from 'sentry/types/group'; import {trackAnalytics} from 'sentry/utils/analytics'; -import {getMessage} from 'sentry/utils/events'; import {getConfigForIssueType} from 'sentry/utils/issueTypeConfig'; import useReplayCountForIssues from 
'sentry/utils/replayCount/useReplayCountForIssues'; import {projectCanLinkToReplay} from 'sentry/utils/replays/projectSupportsReplay'; @@ -30,11 +29,11 @@ import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyti import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import GroupPriority from 'sentry/views/issueDetails/groupPriority'; +import {useIssueDetailsHeader} from 'sentry/views/issueDetails/useIssueDetailsHeader'; import GroupActions from './actions'; -import {ShortIdBreadcrumb} from './shortIdBreadcrumb'; import {Tab} from './types'; -import {ReprocessingStatus} from './utils'; +import type {ReprocessingStatus} from './utils'; type Props = { baseUrl: string; @@ -50,7 +49,7 @@ interface GroupHeaderTabsProps extends Pick { - if (groupReprocessingStatus === ReprocessingStatus.REPROCESSING) { - return [ - Tab.ACTIVITY, - Tab.USER_FEEDBACK, - Tab.ATTACHMENTS, - Tab.EVENTS, - Tab.MERGED, - Tab.SIMILAR_ISSUES, - Tab.TAGS, - ]; - } - - if (groupReprocessingStatus === ReprocessingStatus.REPROCESSED_AND_HASNT_EVENT) { - return [ - Tab.DETAILS, - Tab.ATTACHMENTS, - Tab.EVENTS, - Tab.MERGED, - Tab.SIMILAR_ISSUES, - Tab.TAGS, - Tab.USER_FEEDBACK, - ]; - } - - return []; - }, [groupReprocessingStatus]); - - const eventRoute = useMemo(() => { - const searchTermWithoutQuery = omit(location.query, 'query'); - return { - pathname: `${baseUrl}events/`, - query: searchTermWithoutQuery, - }; - }, [location, baseUrl]); + const { + disabledTabs, + message, + eventRoute, + disableActions, + shortIdBreadcrumb, + className, + } = useIssueDetailsHeader({ + group, + groupReprocessingStatus, + baseUrl, + project, + }); const {userCount} = group; - let className = 'group-detail'; - - if (group.hasSeen) { - className += ' hasSeen'; - } - - if (group.status === 'resolved') { - className += ' isResolved'; - } - - const message = getMessage(group); - - const disableActions = !!disabledTabs.length; - - const 
shortIdBreadcrumb = ( - - ); - const issueTypeConfig = getConfigForIssueType(group, project); const NEW_ISSUE_TYPES = [IssueType.REPLAY_HYDRATION_ERROR]; // adds a "new" banner next to the title diff --git a/static/app/views/issueDetails/streamlinedHeader.tsx b/static/app/views/issueDetails/streamlinedHeader.tsx new file mode 100644 index 0000000000000..969bb77120b0c --- /dev/null +++ b/static/app/views/issueDetails/streamlinedHeader.tsx @@ -0,0 +1,246 @@ +import {useMemo} from 'react'; +import styled from '@emotion/styled'; + +import AvatarList from 'sentry/components/avatar/avatarList'; +import {Breadcrumbs} from 'sentry/components/breadcrumbs'; +import EventOrGroupTitle from 'sentry/components/eventOrGroupTitle'; +import EventMessage from 'sentry/components/events/eventMessage'; +import Divider from 'sentry/components/events/interfaces/debugMeta/debugImageDetails/candidate/information/divider'; +import { + AssigneeSelector, + useHandleAssigneeChange, +} from 'sentry/components/group/assigneeSelector'; +import * as Layout from 'sentry/components/layouts/thirds'; +import Version from 'sentry/components/version'; +import {t} from 'sentry/locale'; +import ConfigStore from 'sentry/stores/configStore'; +import {space} from 'sentry/styles/space'; +import type { + Event, + Group, + Project, + Release, + TeamParticipant, + UserParticipant, +} from 'sentry/types'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import GroupActions from 'sentry/views/issueDetails/actions/index'; +import GroupPriority from 'sentry/views/issueDetails/groupPriority'; +import {GroupHeaderTabs} from 'sentry/views/issueDetails/header'; +import {useIssueDetailsHeader} from 'sentry/views/issueDetails/useIssueDetailsHeader'; +import type {ReprocessingStatus} from 'sentry/views/issueDetails/utils'; + +interface GroupRelease { + firstRelease: Release; + lastRelease: Release; +} 
+ +interface GroupHeaderProps { + baseUrl: string; + group: Group; + groupReprocessingStatus: ReprocessingStatus; + project: Project; + event?: Event; +} + +export default function StreamlinedGroupHeader({ + group, + project, + baseUrl, + groupReprocessingStatus, + event, +}: GroupHeaderProps) { + const location = useLocation(); + const organization = useOrganization(); + const {sort: _sort, ...query} = location.query; + + const {data: groupReleaseData} = useApiQuery( + [`/organizations/${organization.slug}/issues/${group.id}/first-last-release/`], + { + staleTime: 30000, + cacheTime: 30000, + } + ); + + const {firstRelease, lastRelease} = groupReleaseData || {}; + + const {handleAssigneeChange, assigneeLoading} = useHandleAssigneeChange({ + organization, + group, + }); + + const {disabledTabs, message, eventRoute, disableActions, shortIdBreadcrumb} = + useIssueDetailsHeader({ + group, + groupReprocessingStatus, + baseUrl, + project, + }); + + const activeUser = ConfigStore.get('user'); + + const {userParticipants, teamParticipants, displayUsers} = useMemo(() => { + return { + userParticipants: group.participants.filter( + (p): p is UserParticipant => p.type === 'user' + ), + teamParticipants: group.participants.filter( + (p): p is TeamParticipant => p.type === 'team' + ), + displayUsers: group.seenBy.filter(user => activeUser.id !== user.id), + }; + }, [group, activeUser.id]); + + return ( + +
+ + + + + + + + + +
{t('First Seen in')}
+ + +
{t('Last Seen in')}
+ +
+ + + + + + {t('Priority')} + + + + {t('Assignee')} + + + {group.participants.length > 0 && ( + + {t('Participants')} +
+ +
+
+ )} + {displayUsers.length > 0 && ( + + {t('Viewers')} + + + )} +
+
+ +
+
+ ); +} + +const StyledEventOrGroupTitle = styled(EventOrGroupTitle)` + font-size: inherit; +`; + +const TitleWrapper = styled('h3')` + font-size: ${p => p.theme.headerFontSize}; + margin: 0 0 8px; + text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; + color: ${p => p.theme.headingColor}; + + & em { + font-weight: ${p => p.theme.fontWeightNormal}; + color: ${p => p.theme.textColor}; + font-size: 90%; + } +`; + +const TitleHeading = styled('div')` + display: flex; + line-height: 2; + gap: ${space(1)}; +`; + +const StyledBreak = styled('hr')` + margin-top: ${space(3)}; + margin-bottom: 0; + border-color: ${p => p.theme.border}; +`; + +const MessageWrapper = styled('div')` + display: flex; + color: ${p => p.theme.gray300}; + gap: ${space(1)}; +`; + +const InfoWrapper = styled('div')<{isResolved: boolean}>` + padding: ${space(1)} 0; + display: flex; + justify-content: space-between; + gap: ${space(1)}; + background-color: ${p => + p.isResolved + ? 'linear-gradient(to right, rgba(235, 250, 246, 0.2) , rgb(235, 250, 246))0' + : p.theme.backgroundSecondary}; + color: ${p => p.theme.gray300}; +`; + +const PriorityWorkflowWrapper = styled('div')` + display: flex; + gap: ${space(2)}; +`; + +const Wrapper = styled('div')` + display: flex; + align-items: center; + gap: ${space(0.5)}; +`; + +const StyledAvatarList = styled(AvatarList)` + justify-content: flex-end; + padding-left: ${space(0.75)}; +`; diff --git a/static/app/views/issueDetails/updatedHeader.spec.tsx b/static/app/views/issueDetails/updatedHeader.spec.tsx new file mode 100644 index 0000000000000..5f5661d992141 --- /dev/null +++ b/static/app/views/issueDetails/updatedHeader.spec.tsx @@ -0,0 +1,101 @@ +import {GroupFixture} from 'sentry-fixture/group'; +import {OrganizationFixture} from 'sentry-fixture/organization'; +import {ProjectFixture} from 'sentry-fixture/project'; +import {TeamFixture} from 'sentry-fixture/team'; +import {UserFixture} from 'sentry-fixture/user'; + +import {render, screen} 
from 'sentry-test/reactTestingLibrary'; + +import type {TeamParticipant, UserParticipant} from 'sentry/types'; +import {IssueCategory} from 'sentry/types'; +import StreamlinedGroupHeader from 'sentry/views/issueDetails/streamlinedHeader'; +import {ReprocessingStatus} from 'sentry/views/issueDetails/utils'; + +describe('UpdatedGroupHeader', () => { + const baseUrl = 'BASE_URL/'; + const organization = OrganizationFixture(); + const project = ProjectFixture({ + platform: 'javascript', + teams: [TeamFixture()], + }); + const group = GroupFixture({issueCategory: IssueCategory.ERROR}); + + describe('JS Project Error Issue', () => { + const defaultProps = { + organization, + baseUrl, + groupReprocessingStatus: ReprocessingStatus.NO_STATUS, + project, + }; + + beforeEach(() => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/issues/${group.id}/first-last-release/`, + method: 'GET', + body: {}, + }); + + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/replay-count/', + body: {}, + }); + }); + + it('shows all elements of header', async () => { + const teams: TeamParticipant[] = [{...TeamFixture(), type: 'team'}]; + const users: UserParticipant[] = [ + { + ...UserFixture({ + id: '2', + name: 'John Smith', + email: 'johnsmith@example.com', + }), + type: 'user', + }, + { + ...UserFixture({ + id: '3', + name: 'Sohn Jmith', + email: 'sohnjmith@example.com', + }), + type: 'user', + }, + ]; + + const participantGroup = { + ...group, + participants: [...teams, ...users], + seenBy: users, + }; + + render( + , + { + organization, + } + ); + + expect(await screen.findByText('RequestError')).toBeInTheDocument(); + + expect(await screen.findByText('First Seen in')).toBeInTheDocument(); + expect(await screen.findByText('Last Seen in')).toBeInTheDocument(); + + expect( + await screen.findByRole('button', {name: 'Modify issue priority'}) + ).toBeInTheDocument(); + expect( + await screen.findByRole('button', {name: 'Modify issue assignee'}) + 
).toBeInTheDocument(); + + expect(await screen.findByText('Participants')).toBeInTheDocument(); + expect(await screen.findByText('Viewers')).toBeInTheDocument(); + + expect(await screen.findByRole('button', {name: 'Resolve'})).toBeInTheDocument(); + expect(await screen.findByRole('button', {name: 'Archive'})).toBeInTheDocument(); + }); + }); +}); diff --git a/static/app/views/issueDetails/useIssueDetailsHeader.tsx b/static/app/views/issueDetails/useIssueDetailsHeader.tsx new file mode 100644 index 0000000000000..2256fa19a2758 --- /dev/null +++ b/static/app/views/issueDetails/useIssueDetailsHeader.tsx @@ -0,0 +1,90 @@ +import {useMemo} from 'react'; + +import type {Group, Project} from 'sentry/types'; +import {getMessage} from 'sentry/utils/events'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {Tab} from 'sentry/views/issueDetails/types'; +import {ReprocessingStatus} from 'sentry/views/issueDetails/utils'; + +import {ShortIdBreadcrumb} from './shortIdBreadcrumb'; + +interface IssueDetailsHeaderProps { + baseUrl: string; + group: Group; + groupReprocessingStatus: ReprocessingStatus; + project: Project; +} + +export function useIssueDetailsHeader({ + group, + groupReprocessingStatus, + baseUrl, + project, +}: IssueDetailsHeaderProps) { + const location = useLocation(); + const organization = useOrganization(); + const {sort: _sort, ...query} = location.query; + + const disabledTabs = useMemo(() => { + if (groupReprocessingStatus === ReprocessingStatus.REPROCESSING) { + return [ + Tab.ACTIVITY, + Tab.USER_FEEDBACK, + Tab.ATTACHMENTS, + Tab.EVENTS, + Tab.MERGED, + Tab.SIMILAR_ISSUES, + Tab.TAGS, + ]; + } + + if (groupReprocessingStatus === ReprocessingStatus.REPROCESSED_AND_HASNT_EVENT) { + return [ + Tab.DETAILS, + Tab.ATTACHMENTS, + Tab.EVENTS, + Tab.MERGED, + Tab.SIMILAR_ISSUES, + Tab.TAGS, + Tab.USER_FEEDBACK, + ]; + } + + return []; + }, [groupReprocessingStatus]); + + const 
disableActions = !!disabledTabs.length; + + const message = getMessage(group); + + const eventRoute = useMemo(() => { + return { + pathname: `${baseUrl}events/`, + query, + }; + }, [query, baseUrl]); + + const shortIdBreadcrumb = ( + + ); + + let className = 'group-detail'; + + if (group.hasSeen) { + className += ' hasSeen'; + } + + if (group.status === 'resolved') { + className += ' isResolved'; + } + + return { + disabledTabs, + message, + eventRoute, + disableActions, + shortIdBreadcrumb, + className, + }; +} From 3ca31a56701e2b3bd2f898aa905d2b0250ea0650 Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 31 Jul 2024 12:59:31 -0400 Subject: [PATCH 05/52] feat(discover): Add transaction dataset as an option for top events (#75347) Add transactions dataset to the allowlist for top n queries --- src/sentry/api/endpoints/organization_events_stats.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index cb38407cce3ad..afee8887794d0 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -22,6 +22,7 @@ metrics_performance, spans_indexed, spans_metrics, + transactions, ) from sentry.snuba.metrics.extraction import MetricSpecType from sentry.snuba.referrer import Referrer @@ -235,6 +236,7 @@ def get(self, request: Request, organization: Organization) -> Response: spans_indexed, spans_metrics, errors, + transactions, ] else discover ) From 679ee118f072ff6aa3cc7642f0d6c04a702d8f77 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:08:32 -0400 Subject: [PATCH 06/52] ref: fill out TypeVars for ForeignKey (#75228) this fixes a class of problem when upgrading to django-stubs 4.0.3 where `_ST` becomes unbound --- pyproject.toml | 1 + requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- 
src/sentry/db/models/fields/foreignkey.py | 4 +++- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 488c1d7f5a65b..eb3509c242fcd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -501,6 +501,7 @@ module = [ "sentry.buffer.*", "sentry.build.*", "sentry.db.models.fields.citext", + "sentry.db.models.fields.foreignkey", "sentry.db.models.fields.hybrid_cloud_foreign_key", "sentry.db.models.fields.types", "sentry.db.models.manager.*", diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index c86ceaf1c1813..e12e65fbe30d1 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -180,7 +180,7 @@ selenium==4.16.0 sentry-arroyo==2.16.5 sentry-cli==2.16.0 sentry-devenv==1.7.0 -sentry-forked-django-stubs==5.0.2.post8 +sentry-forked-django-stubs==5.0.2.post10 sentry-forked-djangorestframework-stubs==3.15.0.post1 sentry-kafka-schemas==0.1.102 sentry-ophio==0.2.7 diff --git a/requirements-dev.txt b/requirements-dev.txt index ae15e0f26195f..ee2769da44691 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -35,7 +35,7 @@ pip-tools>=7.1.0 packaging>=21.3 # for type checking -sentry-forked-django-stubs>=5.0.2.post8 +sentry-forked-django-stubs>=5.0.2.post10 sentry-forked-djangorestframework-stubs>=3.15.0.post1 lxml-stubs msgpack-types>=0.2.0 diff --git a/src/sentry/db/models/fields/foreignkey.py b/src/sentry/db/models/fields/foreignkey.py index ec0d9d39c553d..c6fee7d566d98 100644 --- a/src/sentry/db/models/fields/foreignkey.py +++ b/src/sentry/db/models/fields/foreignkey.py @@ -5,10 +5,12 @@ from django.db import models from django.db.models import ForeignKey +from sentry.db.models.fields.types import FieldGetType, FieldSetType + __all__ = ("FlexibleForeignKey",) -class FlexibleForeignKey(ForeignKey): +class FlexibleForeignKey(ForeignKey[FieldSetType, FieldGetType]): def __init__(self, *args: Any, **kwargs: Any): kwargs.setdefault("on_delete", models.CASCADE) super().__init__(*args, 
**kwargs) From ff84999f77827f81b3eef2aabc119f032e72313f Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:24:24 -0400 Subject: [PATCH 07/52] ref: fix mypy ignored errors checker in CI (#75348) bash sucks, these are the errors that snuck through in ignored files that the ci check was supposed to prevent: ``` src/sentry/tagstore/snuba/backend.py:1179: error: Incompatible default for argument "dataset" (default has type "None", argument has type "Dataset") [assignment] src/sentry/testutils/cases.py:3253: error: Incompatible default for argument "scheduled_check_time" (default has type "None", argument has type "datetime") [assignment] src/sentry/testutils/cases.py:3290: error: Incompatible default for argument "timestamp" (default has type "None", argument has type "datetime") [assignment] src/sentry/testutils/cases.py:3291: error: Incompatible default for argument "duration" (default has type "None", argument has type "timedelta") [assignment] src/sentry/integrations/github/integration.py:482: error: Incompatible return value type (got "HttpResponseBase", expected "HttpResponse") [return-value] src/sentry/integrations/github/integration.py:507: error: Incompatible return value type (got "HttpResponseBase", expected "HttpResponse") [return-value] src/sentry/web/frontend/auth_login.py:280: error: Incompatible return value type (got "HttpResponseBase", expected "HttpResponse") [return-value] ``` --- .github/workflows/backend.yml | 1 + src/sentry/integrations/github/integration.py | 6 +++--- src/sentry/tagstore/snuba/backend.py | 2 +- src/sentry/testutils/cases.py | 8 ++++---- src/sentry/web/frontend/auth_login.py | 4 ++-- 5 files changed, 11 insertions(+), 10 deletions(-) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 67292e6cbfa94..53b7908fb65fd 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -325,6 +325,7 @@ jobs: - run: | # 
mypy does not have granular codes so don't allow specific messages to regress + set -euo pipefail ! grep "'Settings' object has no attribute" .artifacts/mypy-all ! grep 'Cannot override class variable' .artifacts/mypy-all ! grep 'Exception type must be derived from BaseException' .artifacts/mypy-all diff --git a/src/sentry/integrations/github/integration.py b/src/sentry/integrations/github/integration.py index e47b12b2a26b1..ec8285fb784b5 100644 --- a/src/sentry/integrations/github/integration.py +++ b/src/sentry/integrations/github/integration.py @@ -6,7 +6,7 @@ from typing import Any from urllib.parse import parse_qsl -from django.http import HttpResponse +from django.http.response import HttpResponseBase from django.urls import reverse from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ @@ -384,7 +384,7 @@ def setup(self) -> None: class OAuthLoginView(PipelineView): - def dispatch(self, request: Request, pipeline) -> HttpResponse: + def dispatch(self, request: Request, pipeline) -> HttpResponseBase: self.determine_active_organization(request) ghip = GitHubIdentityProvider() @@ -441,7 +441,7 @@ def get_app_url(self) -> str: name = options.get("github-app.name") return f"https://github.com/apps/{slugify(name)}" - def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponse: + def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponseBase: installation_id = request.GET.get( "installation_id", pipeline.fetch_state("installation_id") ) diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index bba8957ce9f34..e7839bd3622f8 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -1176,7 +1176,7 @@ def get_tag_value_paginator_for_projects( key, start=None, end=None, - dataset: Dataset = None, + dataset: Dataset | None = None, query: str | None = None, order_by="-last_seen", include_transactions: bool = False, diff --git 
a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index b23a24a478b32..7a075c070fb38 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -3250,7 +3250,7 @@ def create_uptime_result( self, subscription_id: str | None = None, status: str = CHECKSTATUS_FAILURE, - scheduled_check_time: datetime = None, + scheduled_check_time: datetime | None = None, ) -> CheckResult: if subscription_id is None: subscription_id = uuid.uuid4().hex @@ -3287,9 +3287,9 @@ class SpanTestCase(BaseTestCase): def load_data( self, platform: str = "transaction", - timestamp: datetime = None, - duration: timedelta = None, - **kwargs: dict[str, Any], + timestamp: datetime | None = None, + duration: timedelta | None = None, + **kwargs: Any, ) -> dict[str | int, Any]: if timestamp is None: timestamp = self.ten_mins_ago diff --git a/src/sentry/web/frontend/auth_login.py b/src/sentry/web/frontend/auth_login.py index a8208afe140c1..6bcabd0295395 100644 --- a/src/sentry/web/frontend/auth_login.py +++ b/src/sentry/web/frontend/auth_login.py @@ -213,7 +213,7 @@ def get_login_page(self, request: Request, **kwargs) -> HttpResponse: ) return self.respond_login(request=request, context=context, **kwargs) - def post(self, request: Request, **kwargs) -> HttpResponse: + def post(self, request: Request, **kwargs) -> HttpResponseBase: op = request.POST.get("op") if op == "sso" and request.POST.get("organization"): return self.redirect_post_to_sso(request=request) @@ -265,7 +265,7 @@ def get_auth_provider_if_exists(self, org_slug: str) -> AuthProvider | None: def handle_register_form_submit( self, request: Request, organization: RpcOrganization, **kwargs - ) -> HttpResponse: + ) -> HttpResponseBase: """ Validates a completed register form, redirecting to the next step or returning the form with its errors displayed. 
From 391346dc027027db775779cea2c1738e3925f813 Mon Sep 17 00:00:00 2001 From: Jodi Jang <116035587+jangjodi@users.noreply.github.com> Date: Wed, 31 Jul 2024 10:24:55 -0700 Subject: [PATCH 08/52] fix(similarity): Filter out null byte from record data (#75295) Filter out null byte from `exception_type` sent to seer record --- src/sentry/grouping/ingest/seer.py | 7 ++++--- src/sentry/seer/similarity/utils.py | 6 +++--- src/sentry/tasks/embeddings_grouping/utils.py | 9 ++++++--- tests/sentry/seer/similarity/test_utils.py | 8 ++++---- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py index 179a9de0d9385..6d951d6eeae0b 100644 --- a/src/sentry/grouping/ingest/seer.py +++ b/src/sentry/grouping/ingest/seer.py @@ -14,7 +14,7 @@ from sentry.seer.similarity.types import SeerSimilarIssuesMetadata, SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( event_content_is_seer_eligible, - filter_null_from_event_title, + filter_null_from_string, get_stacktrace_string, killswitch_enabled, ) @@ -203,14 +203,15 @@ def get_seer_similar_issues( stacktrace_string = get_stacktrace_string( get_grouping_info_from_variants(primary_hashes.variants) ) + exception_type = get_path(event.data, "exception", "values", -1, "type") request_data: SimilarIssuesEmbeddingsRequest = { "event_id": event.event_id, "hash": event_hash, "project_id": event.project.id, "stacktrace": stacktrace_string, - "message": filter_null_from_event_title(event.title), - "exception_type": get_path(event.data, "exception", "values", -1, "type"), + "message": filter_null_from_string(event.title), + "exception_type": filter_null_from_string(exception_type) if exception_type else None, "k": num_neighbors, "referrer": "ingest", } diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py index d602adcb83895..5891a4f6dfda7 100644 --- a/src/sentry/seer/similarity/utils.py +++ 
b/src/sentry/seer/similarity/utils.py @@ -182,11 +182,11 @@ def killswitch_enabled(project_id: int, event: Event | None = None) -> bool: return False -def filter_null_from_event_title(title: str) -> str: +def filter_null_from_string(string: str) -> str: """ - Filter out null bytes from event title so that it can be saved in records table. + Filter out null bytes from string so that it can be saved in records table. """ - return title.replace("\x00", "") + return string.replace("\x00", "") T = TypeVar("T", dict[str, Any], str) diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py index 33d5e05c355f2..b1822330a6629 100644 --- a/src/sentry/tasks/embeddings_grouping/utils.py +++ b/src/sentry/tasks/embeddings_grouping/utils.py @@ -30,7 +30,7 @@ SeerSimilarIssueData, SimilarGroupNotFoundError, ) -from sentry.seer.similarity.utils import filter_null_from_event_title, get_stacktrace_string +from sentry.seer.similarity.utils import filter_null_from_string, get_stacktrace_string from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer from sentry.utils import json, metrics @@ -310,12 +310,15 @@ def get_events_from_nodestore( invalid_event_group_ids.append(group_id) continue + exception_type = get_path(event.data, "exception", "values", -1, "type") group_data.append( CreateGroupingRecordData( group_id=group_id, project_id=project.id, - message=filter_null_from_event_title(event.title), - exception_type=get_path(event.data, "exception", "values", -1, "type"), + message=filter_null_from_string(event.title), + exception_type=filter_null_from_string(exception_type) + if exception_type + else None, hash=primary_hash, ) ) diff --git a/tests/sentry/seer/similarity/test_utils.py b/tests/sentry/seer/similarity/test_utils.py index c206abd489425..9cd4b22f742a6 100644 --- a/tests/sentry/seer/similarity/test_utils.py +++ b/tests/sentry/seer/similarity/test_utils.py @@ -8,7 +8,7 @@ SEER_ELIGIBLE_PLATFORMS, 
_is_snipped_context_line, event_content_is_seer_eligible, - filter_null_from_event_title, + filter_null_from_string, get_stacktrace_string, ) from sentry.testutils.cases import TestCase @@ -786,6 +786,6 @@ def test_platform_filter(self): class SeerUtilsTest(TestCase): - def test_filter_null_from_event_title(self): - title_with_null = 'Title with null \x00, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" is null' - assert filter_null_from_event_title(title_with_null) == 'Title with null , "" is null' + def test_filter_null_from_string(self): + string_with_null = 'String with null \x00, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" is null' + assert filter_null_from_string(string_with_null) == 'String with null , "" is null' From d17d0b0386d2df01d85836960571afc3b1d7855e Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:29:51 -0400 Subject: [PATCH 09/52] ref: move organization absolute url mixin functionality to sentry.organizations (#75340) this is step 1 -- need to do some cleanup in getsentry once these functions are available and then I can kill the organization absolute_url mixin which isn't typesafe --- src/sentry/api/base.py | 2 +- .../endpoints/project_profiling_profile.py | 2 +- .../api/serializers/models/auth_provider.py | 6 +- .../api/serializers/models/organization.py | 3 +- src/sentry/api/utils.py | 59 +---------- src/sentry/auth/helper.py | 2 +- src/sentry/integrations/github/integration.py | 2 +- src/sentry/integrations/pipeline.py | 19 ++-- src/sentry/middleware/customer_domain.py | 2 +- src/sentry/organizations/absolute_url.py | 99 +++++++++++++++++++ src/sentry/types/organization.py | 53 ++-------- src/sentry/utils/auth.py | 3 +- src/sentry/web/client_config.py | 3 +- src/sentry/web/frontend/auth_login.py | 2 +- src/sentry/web/frontend/base.py | 3 +- src/sentry/web/frontend/pipeline_advancer.py | 2 +- 
src/sentry/web/frontend/react_page.py | 2 +- tests/sentry/api/test_utils.py | 70 ------------- .../integrations/github/test_integration.py | 2 +- tests/sentry/integrations/test_pipeline.py | 2 +- .../sentry/organizations/test_absolute_url.py | 73 ++++++++++++++ 21 files changed, 212 insertions(+), 199 deletions(-) create mode 100644 src/sentry/organizations/absolute_url.py create mode 100644 tests/sentry/organizations/test_absolute_url.py diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 034504d1b2fd1..3358b50d76670 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -32,6 +32,7 @@ from sentry.auth import access from sentry.auth.staff import has_staff_option from sentry.models.environment import Environment +from sentry.organizations.absolute_url import generate_organization_url from sentry.ratelimits.config import DEFAULT_RATE_LIMIT_CONFIG, RateLimitConfig from sentry.silo.base import SiloLimit, SiloMode from sentry.types.ratelimit import RateLimit, RateLimitCategory @@ -65,7 +66,6 @@ SuperuserOrStaffFeatureFlaggedPermission, SuperuserPermission, ) -from .utils import generate_organization_url __all__ = [ "Endpoint", diff --git a/src/sentry/api/endpoints/project_profiling_profile.py b/src/sentry/api/endpoints/project_profiling_profile.py index 19beaae46db3d..87418b6e3187f 100644 --- a/src/sentry/api/endpoints/project_profiling_profile.py +++ b/src/sentry/api/endpoints/project_profiling_profile.py @@ -13,10 +13,10 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint from sentry.api.serializers import serialize -from sentry.api.utils import generate_organization_url from sentry.exceptions import InvalidSearchQuery from sentry.models.project import Project from sentry.models.release import Release +from sentry.organizations.absolute_url import generate_organization_url from sentry.profiles.utils import ( get_from_profiling_service, parse_profile_filters, diff --git 
a/src/sentry/api/serializers/models/auth_provider.py b/src/sentry/api/serializers/models/auth_provider.py index 076975d655d1b..2cf6567ab93f6 100644 --- a/src/sentry/api/serializers/models/auth_provider.py +++ b/src/sentry/api/serializers/models/auth_provider.py @@ -8,8 +8,8 @@ from sentry.models.authprovider import AuthProvider from sentry.models.organization import Organization from sentry.models.organizationmember import OrganizationMember +from sentry.organizations.absolute_url import organization_absolute_url from sentry.organizations.services.organization.model import RpcOrganization -from sentry.types.organization import OrganizationAbsoluteUrlMixin @register(AuthProvider) @@ -28,8 +28,8 @@ def serialize( login_url = Organization.get_url(organization.slug) - absolute_login_url = OrganizationAbsoluteUrlMixin.organization_absolute_url( - features.has("system:multi-region"), + absolute_login_url = organization_absolute_url( + has_customer_domain=features.has("system:multi-region"), slug=organization.slug, path=login_url, ) diff --git a/src/sentry/api/serializers/models/organization.py b/src/sentry/api/serializers/models/organization.py index 32dc0fb53cc09..f9b603278075a 100644 --- a/src/sentry/api/serializers/models/organization.py +++ b/src/sentry/api/serializers/models/organization.py @@ -23,7 +23,7 @@ ) from sentry.api.serializers.models.team import TeamSerializerResponse from sentry.api.serializers.types import OrganizationSerializerResponse -from sentry.api.utils import generate_organization_url, generate_region_url +from sentry.api.utils import generate_region_url from sentry.auth.access import Access from sentry.auth.services.auth import RpcOrganizationAuthConfig, auth_service from sentry.constants import ( @@ -64,6 +64,7 @@ from sentry.models.project import Project from sentry.models.team import Team, TeamStatus from sentry.models.user import User +from sentry.organizations.absolute_url import generate_organization_url from 
sentry.organizations.services.organization import RpcOrganizationSummary from sentry.users.services.user.service import user_service diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index 3d45d43b06a8a..804c7429bdb3d 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -2,14 +2,12 @@ import datetime import logging -import re import sys import traceback from collections.abc import Generator, Mapping, MutableMapping from contextlib import contextmanager from datetime import timedelta from typing import Any, Literal, overload -from urllib.parse import urlparse import sentry_sdk from django.conf import settings @@ -31,6 +29,10 @@ from sentry.models.apitoken import is_api_token_auth from sentry.models.organization import Organization from sentry.models.orgauthtoken import is_org_auth_token_auth +from sentry.organizations.absolute_url import ( # noqa: F401 # XXX: for compatibility, remove after getsentry is updated + customer_domain_path, + generate_organization_url, +) from sentry.organizations.services.organization import ( RpcOrganization, RpcOrganizationMember, @@ -271,25 +273,6 @@ def is_member_disabled_from_limit( return member.flags.member_limit__restricted -def generate_organization_hostname(org_slug: str) -> str: - url_prefix_hostname: str = urlparse(options.get("system.url-prefix")).netloc - org_base_hostname_template: str = options.get("system.organization-base-hostname") - if not org_base_hostname_template: - return url_prefix_hostname - has_org_slug_placeholder = "{slug}" in org_base_hostname_template - if not has_org_slug_placeholder: - return url_prefix_hostname - org_hostname = org_base_hostname_template.replace("{slug}", org_slug) - return org_hostname - - -def generate_organization_url(org_slug: str) -> str: - org_url_template: str = options.get("system.organization-url-template") - if not org_url_template: - return options.get("system.url-prefix") - return org_url_template.replace("{hostname}", 
generate_organization_hostname(org_slug)) - - def generate_region_url(region_name: str | None = None) -> str: region_url_template: str | None = options.get("system.region-api-url-template") if region_name is None and SiloMode.get_current_mode() == SiloMode.REGION: @@ -305,40 +288,6 @@ def generate_region_url(region_name: str | None = None) -> str: return region_url_template.replace("{region}", region_name) -_path_patterns: list[tuple[re.Pattern[str], str]] = [ - # /organizations/slug/section, but not /organizations/new - (re.compile(r"\/?organizations\/(?!new)[^\/]+\/(.*)"), r"/\1"), - # For /settings/:orgId/ -> /settings/organization/ - ( - re.compile(r"\/settings\/(?!account\/|!billing\/|projects\/|teams)[^\/]+\/?$"), - "/settings/organization/", - ), - # Move /settings/:orgId/:section -> /settings/:section - # but not /settings/organization or /settings/projects which is a new URL - ( - re.compile(r"^\/?settings\/(?!account\/|billing\/|projects\/|teams)[^\/]+\/(.*)"), - r"/settings/\1", - ), - (re.compile(r"^\/?join-request\/[^\/]+\/?.*"), r"/join-request/"), - (re.compile(r"^\/?onboarding\/[^\/]+\/(.*)"), r"/onboarding/\1"), - ( - re.compile(r"^\/?(?!settings)[^\/]+\/([^\/]+)\/getting-started\/(.*)"), - r"/getting-started/\1/\2", - ), -] - - -def customer_domain_path(path: str) -> str: - """ - Server side companion to path normalizations found in withDomainRequired - """ - for pattern, replacement in _path_patterns: - updated = pattern.sub(replacement, path) - if updated != path: - return updated - return path - - def method_dispatch(**dispatch_mapping): """ Dispatches an incoming request to a different handler based on the HTTP method diff --git a/src/sentry/auth/helper.py b/src/sentry/auth/helper.py index b6ec5696417cf..1c6d1c51ea2cc 100644 --- a/src/sentry/auth/helper.py +++ b/src/sentry/auth/helper.py @@ -25,7 +25,6 @@ from sentry import audit_log, features from sentry.api.invite_helper import ApiInviteHelper, remove_invite_details_from_session -from 
sentry.api.utils import generate_organization_url from sentry.audit_log.services.log import AuditLogEvent, log_service from sentry.auth.email import AmbiguousUserFromEmail, resolve_email_to_user from sentry.auth.exceptions import IdentityNotValid @@ -42,6 +41,7 @@ from sentry.models.authprovider import AuthProvider from sentry.models.outbox import outbox_context from sentry.models.user import User +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import ( RpcOrganization, RpcOrganizationFlagsUpdate, diff --git a/src/sentry/integrations/github/integration.py b/src/sentry/integrations/github/integration.py index ec8285fb784b5..bdec82a186088 100644 --- a/src/sentry/integrations/github/integration.py +++ b/src/sentry/integrations/github/integration.py @@ -13,7 +13,6 @@ from rest_framework.request import Request from sentry import features, options -from sentry.api.utils import generate_organization_url from sentry.constants import ObjectStatus from sentry.http import safe_urlopen, safe_urlread from sentry.identity.github import GitHubIdentityProvider, get_user_info @@ -31,6 +30,7 @@ from sentry.integrations.services.repository import RpcRepository, repository_service from sentry.integrations.utils.code_mapping import RepoTree from sentry.models.repository import Repository +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import RpcOrganizationSummary, organization_service from sentry.pipeline import Pipeline, PipelineView from sentry.shared_integrations.constants import ERR_INTERNAL, ERR_UNAUTHORIZED diff --git a/src/sentry/integrations/pipeline.py b/src/sentry/integrations/pipeline.py index 4e2e922093da8..12d9aad62690b 100644 --- a/src/sentry/integrations/pipeline.py +++ b/src/sentry/integrations/pipeline.py @@ -1,32 +1,29 @@ from __future__ import annotations -from django.http import HttpResponseRedirect - -from sentry import 
features -from sentry.api.utils import generate_organization_url -from sentry.models.organizationmapping import OrganizationMapping -from sentry.silo.base import SiloMode - -__all__ = ["IntegrationPipeline"] - import logging from django.db import IntegrityError +from django.http import HttpResponseRedirect from django.utils import timezone from django.utils.translation import gettext as _ +from sentry import features from sentry.api.serializers import serialize from sentry.constants import ObjectStatus +from sentry.integrations.manager import default_manager from sentry.integrations.models.integration import Integration from sentry.integrations.models.organization_integration import OrganizationIntegration from sentry.models.identity import Identity, IdentityProvider, IdentityStatus +from sentry.models.organizationmapping import OrganizationMapping +from sentry.organizations.absolute_url import generate_organization_url from sentry.pipeline import Pipeline, PipelineAnalyticsEntry from sentry.shared_integrations.exceptions import IntegrationError, IntegrationProviderError +from sentry.silo.base import SiloMode from sentry.web.helpers import render_to_response -logger = logging.getLogger(__name__) +__all__ = ["IntegrationPipeline"] -from sentry.integrations.manager import default_manager +logger = logging.getLogger(__name__) def ensure_integration(key, data): diff --git a/src/sentry/middleware/customer_domain.py b/src/sentry/middleware/customer_domain.py index 125b77752fc95..8a81a14c2e753 100644 --- a/src/sentry/middleware/customer_domain.py +++ b/src/sentry/middleware/customer_domain.py @@ -11,7 +11,7 @@ from django.urls import resolve, reverse from sentry import features -from sentry.api.utils import generate_organization_url +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import organization_service from sentry.types.region import subdomain_is_region from sentry.utils import auth diff --git 
a/src/sentry/organizations/absolute_url.py b/src/sentry/organizations/absolute_url.py new file mode 100644 index 0000000000000..0cc4794386230 --- /dev/null +++ b/src/sentry/organizations/absolute_url.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +import re +from urllib.parse import urlparse + +from sentry import features, options +from sentry.app import env +from sentry.utils.http import absolute_uri, is_using_customer_domain + +_path_patterns: list[tuple[re.Pattern[str], str]] = [ + # /organizations/slug/section, but not /organizations/new + (re.compile(r"\/?organizations\/(?!new)[^\/]+\/(.*)"), r"/\1"), + # For /settings/:orgId/ -> /settings/organization/ + ( + re.compile(r"\/settings\/(?!account\/|!billing\/|projects\/|teams)[^\/]+\/?$"), + "/settings/organization/", + ), + # Move /settings/:orgId/:section -> /settings/:section + # but not /settings/organization or /settings/projects which is a new URL + ( + re.compile(r"^\/?settings\/(?!account\/|billing\/|projects\/|teams)[^\/]+\/(.*)"), + r"/settings/\1", + ), + (re.compile(r"^\/?join-request\/[^\/]+\/?.*"), r"/join-request/"), + (re.compile(r"^\/?onboarding\/[^\/]+\/(.*)"), r"/onboarding/\1"), + ( + re.compile(r"^\/?(?!settings)[^\/]+\/([^\/]+)\/getting-started\/(.*)"), + r"/getting-started/\1/\2", + ), +] + + +def customer_domain_path(path: str) -> str: + """ + Server side companion to path normalizations found in withDomainRequired + """ + for pattern, replacement in _path_patterns: + updated = pattern.sub(replacement, path) + if updated != path: + return updated + return path + + +def _generate_organization_hostname(org_slug: str) -> str: + url_prefix_hostname: str = urlparse(options.get("system.url-prefix")).netloc + org_base_hostname_template: str = options.get("system.organization-base-hostname") + if not org_base_hostname_template: + return url_prefix_hostname + has_org_slug_placeholder = "{slug}" in org_base_hostname_template + if not has_org_slug_placeholder: + return url_prefix_hostname 
+ org_hostname = org_base_hostname_template.replace("{slug}", org_slug) + return org_hostname + + +def generate_organization_url(org_slug: str) -> str: + org_url_template: str = options.get("system.organization-url-template") + if not org_url_template: + return options.get("system.url-prefix") + return org_url_template.replace("{hostname}", _generate_organization_hostname(org_slug)) + + +def has_customer_domain() -> bool: + return ( + # XXX: this accesses a sneaky global + (env.request is not None and is_using_customer_domain(env.request)) + or features.has("system:multi-region") + ) + + +def organization_absolute_url( + *, + has_customer_domain: bool, + slug: str, + path: str, + query: str | None = None, + fragment: str | None = None, +) -> str: + """ + Get an absolute URL to `path` for this organization. + + This method takes customer-domains into account and will update the path when + customer-domains are active. + """ + url_base = None + if has_customer_domain: + path = customer_domain_path(path) + url_base = generate_organization_url(slug) + uri = absolute_uri(path, url_prefix=url_base) + parts = [uri] + if query and not query.startswith("?"): + query = f"?{query}" + if query: + parts.append(query) + if fragment and not fragment.startswith("#"): + fragment = f"#{fragment}" + if fragment: + parts.append(fragment) + return "".join(parts) diff --git a/src/sentry/types/organization.py b/src/sentry/types/organization.py index 9e65a98f0866b..9535b4c64b184 100644 --- a/src/sentry/types/organization.py +++ b/src/sentry/types/organization.py @@ -4,9 +4,7 @@ from django.db import models -from sentry import features -from sentry.app import env -from sentry.utils.http import is_using_customer_domain +from sentry.organizations.absolute_url import has_customer_domain, organization_absolute_url class OrganizationAbsoluteUrlMixin: @@ -17,12 +15,7 @@ def __has_customer_domain(self) -> bool: """ Check if the current organization is using or has access to customer domains. 
""" - - request = env.request - if request and is_using_customer_domain(request): - return True - - return features.has("system:multi-region") + return has_customer_domain() def _has_customer_domain(self) -> bool: # For getsentry compatibility @@ -35,40 +28,10 @@ def absolute_url(self, path: str, query: str | None = None, fragment: str | None This method takes customer-domains into account and will update the path when customer-domains are active. """ - return self.organization_absolute_url( - self.__has_customer_domain, self.slug, path=path, query=query, fragment=fragment + return organization_absolute_url( + has_customer_domain=self.__has_customer_domain, + slug=self.slug, + path=path, + query=query, + fragment=fragment, ) - - @staticmethod - def organization_absolute_url( - has_customer_domain: bool, - slug: str, - path: str, - query: str | None = None, - fragment: str | None = None, - ) -> str: - """ - Get an absolute URL to `path` for this organization. - - This method takes customer-domains into account and will update the path when - customer-domains are active. - """ - # Avoid cycles. 
- from sentry.api.utils import customer_domain_path, generate_organization_url - from sentry.utils.http import absolute_uri - - url_base = None - if has_customer_domain: - path = customer_domain_path(path) - url_base = generate_organization_url(slug) - uri = absolute_uri(path, url_prefix=url_base) - parts = [uri] - if query and not query.startswith("?"): - query = f"?{query}" - if query: - parts.append(query) - if fragment and not fragment.startswith("#"): - fragment = f"#{fragment}" - if fragment: - parts.append(fragment) - return "".join(parts) diff --git a/src/sentry/utils/auth.py b/src/sentry/utils/auth.py index fd2bab81a0a0b..b7ec256b7b803 100644 --- a/src/sentry/utils/auth.py +++ b/src/sentry/utils/auth.py @@ -19,6 +19,7 @@ from sentry.models.organization import Organization from sentry.models.outbox import outbox_context from sentry.models.user import User +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import RpcOrganization from sentry.users.services.user import RpcUser from sentry.users.services.user.service import user_service @@ -190,8 +191,6 @@ def _get_login_redirect(request: HttpRequest, default: str | None = None) -> str def get_login_redirect(request: HttpRequest, default: str | None = None) -> str: - from sentry.api.utils import generate_organization_url - login_redirect = _get_login_redirect(request, default) url_prefix = None if hasattr(request, "subdomain") and request.subdomain: diff --git a/src/sentry/web/client_config.py b/src/sentry/web/client_config.py index c81951631ed94..9e62682e04bdb 100644 --- a/src/sentry/web/client_config.py +++ b/src/sentry/web/client_config.py @@ -16,12 +16,13 @@ import sentry from sentry import features, options -from sentry.api.utils import generate_organization_url, generate_region_url +from sentry.api.utils import generate_region_url from sentry.auth import superuser from sentry.auth.services.auth import AuthenticatedToken, 
AuthenticationContext from sentry.auth.superuser import is_active_superuser from sentry.models.organizationmapping import OrganizationMapping from sentry.models.user import User +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import ( RpcOrganization, RpcUserOrganizationContext, diff --git a/src/sentry/web/frontend/auth_login.py b/src/sentry/web/frontend/auth_login.py index 6bcabd0295395..4a91cd57c08e7 100644 --- a/src/sentry/web/frontend/auth_login.py +++ b/src/sentry/web/frontend/auth_login.py @@ -17,7 +17,6 @@ from sentry import features from sentry.api.invite_helper import ApiInviteHelper, remove_invite_details_from_session -from sentry.api.utils import generate_organization_url from sentry.auth.superuser import is_active_superuser from sentry.constants import WARN_SESSION_EXPIRED from sentry.http import get_server_hostname @@ -26,6 +25,7 @@ from sentry.models.organization import OrganizationStatus from sentry.models.organizationmapping import OrganizationMapping from sentry.models.user import User +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import RpcOrganization, organization_service from sentry.signals import join_request_link_viewed, user_signup from sentry.types.ratelimit import RateLimit, RateLimitCategory diff --git a/src/sentry/web/frontend/base.py b/src/sentry/web/frontend/base.py index eafb9f6391fba..ddd0012cb43d8 100644 --- a/src/sentry/web/frontend/base.py +++ b/src/sentry/web/frontend/base.py @@ -25,7 +25,7 @@ from rest_framework.request import Request from sentry import options -from sentry.api.utils import generate_organization_url, is_member_disabled_from_limit +from sentry.api.utils import is_member_disabled_from_limit from sentry.auth import access from sentry.auth.superuser import is_active_superuser from sentry.constants import ObjectStatus @@ -33,6 +33,7 @@ from sentry.models.avatars.base 
import AvatarBase from sentry.models.organization import Organization, OrganizationStatus from sentry.models.project import Project +from sentry.organizations.absolute_url import generate_organization_url from sentry.organizations.services.organization import ( RpcOrganization, RpcOrganizationSummary, diff --git a/src/sentry/web/frontend/pipeline_advancer.py b/src/sentry/web/frontend/pipeline_advancer.py index cfe97929605ff..f88a026352659 100644 --- a/src/sentry/web/frontend/pipeline_advancer.py +++ b/src/sentry/web/frontend/pipeline_advancer.py @@ -4,9 +4,9 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from sentry.api.utils import generate_organization_url from sentry.identity.pipeline import IdentityProviderPipeline from sentry.integrations.pipeline import IntegrationPipeline +from sentry.organizations.absolute_url import generate_organization_url from sentry.utils.http import absolute_uri, create_redirect_url from sentry.web.frontend.base import BaseView diff --git a/src/sentry/web/frontend/react_page.py b/src/sentry/web/frontend/react_page.py index b8e00a8eaf81d..79b4b4b73d82c 100644 --- a/src/sentry/web/frontend/react_page.py +++ b/src/sentry/web/frontend/react_page.py @@ -12,7 +12,7 @@ from rest_framework.request import Request from sentry import features, options -from sentry.api.utils import customer_domain_path, generate_organization_url +from sentry.organizations.absolute_url import customer_domain_path, generate_organization_url from sentry.organizations.services.organization import organization_service from sentry.types.region import subdomain_is_region from sentry.users.services.user.model import RpcUser diff --git a/tests/sentry/api/test_utils.py b/tests/sentry/api/test_utils.py index ce0aa401e09eb..df2b98a91ca0c 100644 --- a/tests/sentry/api/test_utils.py +++ b/tests/sentry/api/test_utils.py @@ -9,7 +9,6 @@ from sentry.api.utils import ( MAX_STATS_PERIOD, - customer_domain_path, get_date_range_from_params, 
handle_query_errors, print_and_capture_handler_exception, @@ -164,75 +163,6 @@ def test_merges_handler_context_with_scope( assert capture_exception_scope_kwarg._tags == expected_scope_tags -def test_customer_domain_path(): - scenarios = [ - # Input, expected - ["/settings/", "/settings/"], - # Organization settings views. - ["/settings/acme/", "/settings/organization/"], - ["/settings/organization", "/settings/organization/"], - ["/settings/sentry/members/", "/settings/members/"], - ["/settings/sentry/members/3/", "/settings/members/3/"], - ["/settings/sentry/teams/peeps/", "/settings/teams/peeps/"], - ["/settings/sentry/billing/receipts/", "/settings/billing/receipts/"], - [ - "/settings/acme/developer-settings/release-bot/", - "/settings/developer-settings/release-bot/", - ], - # Settings views for orgs with acccount/billing in their slugs. - ["/settings/account-on/", "/settings/organization/"], - ["/settings/billing-co/", "/settings/organization/"], - ["/settings/account-on/integrations/", "/settings/integrations/"], - [ - "/settings/account-on/projects/billing-app/source-maps/", - "/settings/projects/billing-app/source-maps/", - ], - ["/settings/billing-co/integrations/", "/settings/integrations/"], - [ - "/settings/billing-co/projects/billing-app/source-maps/", - "/settings/projects/billing-app/source-maps/", - ], - # Account settings should stay the same - ["/settings/account/", "/settings/account/"], - ["/settings/account/security/", "/settings/account/security/"], - ["/settings/account/details/", "/settings/account/details/"], - ["/join-request/acme", "/join-request/"], - ["/join-request/acme/", "/join-request/"], - ["/onboarding/acme/", "/onboarding/"], - ["/onboarding/acme/project/", "/onboarding/project/"], - ["/organizations/new/", "/organizations/new/"], - ["/organizations/albertos-apples/issues/", "/issues/"], - ["/organizations/albertos-apples/issues/?_q=all#hash", "/issues/?_q=all#hash"], - ["/acme/project-slug/getting-started/", 
"/getting-started/project-slug/"], - [ - "/acme/project-slug/getting-started/python", - "/getting-started/project-slug/python", - ], - ["/settings/projects/python/filters/", "/settings/projects/python/filters/"], - ["/settings/projects/onboarding/abc123/", "/settings/projects/onboarding/abc123/"], - [ - "/settings/projects/join-request/abc123/", - "/settings/projects/join-request/abc123/", - ], - [ - "/settings/projects/python/filters/discarded/", - "/settings/projects/python/filters/discarded/", - ], - [ - "/settings/projects/getting-started/abc123/", - "/settings/projects/getting-started/abc123/", - ], - ["/settings/teams/peeps/", "/settings/teams/peeps/"], - ["/settings/billing/checkout/?_q=all#hash", "/settings/billing/checkout/?_q=all#hash"], - [ - "/settings/billing/bundle-checkout/?_q=all#hash", - "/settings/billing/bundle-checkout/?_q=all#hash", - ], - ] - for input_path, expected in scenarios: - assert expected == customer_domain_path(input_path) - - class FooBarError(Exception): pass diff --git a/tests/sentry/integrations/github/test_integration.py b/tests/sentry/integrations/github/test_integration.py index 9608774ff6f23..5f6c44fc229ca 100644 --- a/tests/sentry/integrations/github/test_integration.py +++ b/tests/sentry/integrations/github/test_integration.py @@ -14,7 +14,6 @@ import sentry from fixtures.github import INSTALLATION_EVENT_EXAMPLE -from sentry.api.utils import generate_organization_url from sentry.constants import ObjectStatus from sentry.integrations.github import ( API_ERRORS, @@ -29,6 +28,7 @@ from sentry.integrations.utils.code_mapping import Repo, RepoTree from sentry.models.project import Project from sentry.models.repository import Repository +from sentry.organizations.absolute_url import generate_organization_url from sentry.plugins.base import plugins from sentry.plugins.bases.issue2 import IssueTrackingPlugin2 from sentry.shared_integrations.exceptions import ApiError diff --git a/tests/sentry/integrations/test_pipeline.py 
b/tests/sentry/integrations/test_pipeline.py index fe5ed2fa4a575..d5e427373573d 100644 --- a/tests/sentry/integrations/test_pipeline.py +++ b/tests/sentry/integrations/test_pipeline.py @@ -2,7 +2,6 @@ from django.db import router -from sentry.api.utils import generate_organization_url from sentry.integrations.example import AliasedIntegrationProvider, ExampleIntegrationProvider from sentry.integrations.gitlab.integration import GitlabIntegrationProvider from sentry.integrations.models.integration import Integration @@ -10,6 +9,7 @@ from sentry.models.identity import Identity from sentry.models.organizationmapping import OrganizationMapping from sentry.models.repository import Repository +from sentry.organizations.absolute_url import generate_organization_url from sentry.plugins.base import plugins from sentry.plugins.bases.issue2 import IssuePlugin2 from sentry.signals import receivers_raise_on_send diff --git a/tests/sentry/organizations/test_absolute_url.py b/tests/sentry/organizations/test_absolute_url.py new file mode 100644 index 0000000000000..cdc2177d37952 --- /dev/null +++ b/tests/sentry/organizations/test_absolute_url.py @@ -0,0 +1,73 @@ +import pytest + +from sentry.organizations.absolute_url import customer_domain_path + + +@pytest.mark.parametrize( + ("input", "expected"), + ( + ("/settings/", "/settings/"), + # Organization settings views. + ("/settings/acme/", "/settings/organization/"), + ("/settings/organization", "/settings/organization/"), + ("/settings/sentry/members/", "/settings/members/"), + ("/settings/sentry/members/3/", "/settings/members/3/"), + ("/settings/sentry/teams/peeps/", "/settings/teams/peeps/"), + ("/settings/sentry/billing/receipts/", "/settings/billing/receipts/"), + ( + "/settings/acme/developer-settings/release-bot/", + "/settings/developer-settings/release-bot/", + ), + # Settings views for orgs with acccount/billing in their slugs. 
+ ("/settings/account-on/", "/settings/organization/"), + ("/settings/billing-co/", "/settings/organization/"), + ("/settings/account-on/integrations/", "/settings/integrations/"), + ( + "/settings/account-on/projects/billing-app/source-maps/", + "/settings/projects/billing-app/source-maps/", + ), + ("/settings/billing-co/integrations/", "/settings/integrations/"), + ( + "/settings/billing-co/projects/billing-app/source-maps/", + "/settings/projects/billing-app/source-maps/", + ), + # Account settings should stay the same + ("/settings/account/", "/settings/account/"), + ("/settings/account/security/", "/settings/account/security/"), + ("/settings/account/details/", "/settings/account/details/"), + ("/join-request/acme", "/join-request/"), + ("/join-request/acme/", "/join-request/"), + ("/onboarding/acme/", "/onboarding/"), + ("/onboarding/acme/project/", "/onboarding/project/"), + ("/organizations/new/", "/organizations/new/"), + ("/organizations/albertos-apples/issues/", "/issues/"), + ("/organizations/albertos-apples/issues/?_q=all#hash", "/issues/?_q=all#hash"), + ("/acme/project-slug/getting-started/", "/getting-started/project-slug/"), + ( + "/acme/project-slug/getting-started/python", + "/getting-started/project-slug/python", + ), + ("/settings/projects/python/filters/", "/settings/projects/python/filters/"), + ("/settings/projects/onboarding/abc123/", "/settings/projects/onboarding/abc123/"), + ( + "/settings/projects/join-request/abc123/", + "/settings/projects/join-request/abc123/", + ), + ( + "/settings/projects/python/filters/discarded/", + "/settings/projects/python/filters/discarded/", + ), + ( + "/settings/projects/getting-started/abc123/", + "/settings/projects/getting-started/abc123/", + ), + ("/settings/teams/peeps/", "/settings/teams/peeps/"), + ("/settings/billing/checkout/?_q=all#hash", "/settings/billing/checkout/?_q=all#hash"), + ( + "/settings/billing/bundle-checkout/?_q=all#hash", + "/settings/billing/bundle-checkout/?_q=all#hash", + ), + 
), +) +def test_customer_domain_path(input: str, expected: str) -> None: + assert expected == customer_domain_path(input) From d33fdfece2c3f1c2a167bdab230533428743e2ec Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:32:20 -0400 Subject: [PATCH 10/52] ref: upgrade mypy (#75350) --- requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index e12e65fbe30d1..11cfa8c477703 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -101,7 +101,7 @@ mmh3==4.0.0 more-itertools==8.13.0 msgpack==1.0.7 msgpack-types==0.2.0 -mypy==1.11.0 +mypy==1.11.1 mypy-extensions==1.0.0 nodeenv==1.8.0 oauthlib==3.1.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index ee2769da44691..f2561e3a671f1 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -39,7 +39,7 @@ sentry-forked-django-stubs>=5.0.2.post10 sentry-forked-djangorestframework-stubs>=3.15.0.post1 lxml-stubs msgpack-types>=0.2.0 -mypy>=1.11 +mypy>=1.11.1 types-beautifulsoup4 types-cachetools types-croniter From 16b75635ac031bb5369673ace4ecdf016961a394 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:49:31 -0400 Subject: [PATCH 11/52] ref: apply pyupgrade 3.17.0 changes (#75352) doing this separate to the bump so it can be rolled forward / back atomically wrt getsentry --- src/sentry/api/utils.py | 2 +- src/sentry/apidocs/utils.py | 2 +- src/sentry/db/deletion.py | 2 +- src/sentry/db/models/manager/base.py | 4 +-- src/sentry/grouping/component.py | 2 +- .../grouping/fingerprinting/__init__.py | 2 +- src/sentry/grouping/strategies/newstyle.py | 4 +-- src/sentry/hybridcloud/apigateway/proxy.py | 2 +- src/sentry/hybridcloud/rpc/__init__.py | 2 +- src/sentry/hybridcloud/rpc/service.py | 2 +- 
.../integrations/repository/issue_alert.py | 2 +- .../slack/actions/notification.py | 2 +- src/sentry/models/outbox.py | 8 ++--- src/sentry/newsletter/dummy.py | 2 +- src/sentry/plugins/base/manager.py | 10 +++--- src/sentry/replays/post_process.py | 6 ++-- src/sentry/replays/query.py | 2 +- .../replays/usecases/ingest/dom_index.py | 2 +- src/sentry/replays/usecases/replay_counts.py | 2 +- src/sentry/rules/actions/base.py | 2 +- .../integrations/create_ticket/base.py | 2 +- .../rules/actions/notify_event_service.py | 2 +- .../rules/actions/sentry_apps/notify_event.py | 2 +- src/sentry/rules/registry.py | 2 +- src/sentry/runner/commands/backup.py | 8 ++--- src/sentry/runner/commands/devservices.py | 2 +- src/sentry/runner/commands/workstations.py | 2 +- src/sentry/services/http.py | 2 +- src/sentry/silo/base.py | 4 +-- src/sentry/snuba/metrics/utils.py | 2 +- src/sentry/statistical_detectors/detector.py | 34 +++++++++---------- src/sentry/tasks/statistical_detectors.py | 14 ++++---- src/sentry/testutils/helpers/socket.py | 2 +- src/sentry/testutils/helpers/task_runner.py | 6 ++-- src/sentry/testutils/silo.py | 6 ++-- src/sentry/utils/json.py | 2 +- src/sentry/utils/locking/lock.py | 2 +- src/sentry/utils/relocation.py | 2 +- src/sentry/utils/sdk.py | 2 +- src/sentry/web/frontend/csv.py | 2 +- src/sentry/web/frontend/debug/mail.py | 2 +- tests/sentry/utils/test_assets.py | 8 ++--- tools/flake8_plugin.py | 2 +- 43 files changed, 83 insertions(+), 91 deletions(-) diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index 804c7429bdb3d..294b84bf509cf 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -343,7 +343,7 @@ def get_auth_api_token_type(auth: object) -> str | None: @contextmanager -def handle_query_errors() -> Generator[None, None, None]: +def handle_query_errors() -> Generator[None]: try: yield except InvalidSearchQuery as error: diff --git a/src/sentry/apidocs/utils.py b/src/sentry/apidocs/utils.py index 
9f1814556eab3..19f42c07a9867 100644 --- a/src/sentry/apidocs/utils.py +++ b/src/sentry/apidocs/utils.py @@ -64,7 +64,7 @@ def __init__(self, msg: str = "", *args: Any, **kwargs: Any) -> None: def reload_module_with_type_checking_enabled(module_name: str) -> None: @contextmanager - def _patch_type_checking_const() -> Generator[None, None, None]: + def _patch_type_checking_const() -> Generator[None]: try: setattr(typing, "TYPE_CHECKING", True) yield diff --git a/src/sentry/db/deletion.py b/src/sentry/db/deletion.py index 80a91352118ee..0fb629e5af8f6 100644 --- a/src/sentry/db/deletion.py +++ b/src/sentry/db/deletion.py @@ -74,7 +74,7 @@ def _continuous_query(self, query): cursor.execute(query) results = cursor.rowcount > 0 - def iterator(self, chunk_size=100, batch_size=100000) -> Generator[tuple[int, ...], None, None]: + def iterator(self, chunk_size=100, batch_size=100000) -> Generator[tuple[int, ...]]: assert self.days is not None assert self.dtfield is not None and self.dtfield == self.order_by diff --git a/src/sentry/db/models/manager/base.py b/src/sentry/db/models/manager/base.py index 873ff080069ac..6ea31d22dd665 100644 --- a/src/sentry/db/models/manager/base.py +++ b/src/sentry/db/models/manager/base.py @@ -103,7 +103,7 @@ def __init__( @staticmethod @contextmanager - def local_cache() -> Generator[None, None, None]: + def local_cache() -> Generator[None]: """Enables local caching for the entire process.""" global _local_cache_enabled, _local_cache_generation if _local_cache_enabled: @@ -513,7 +513,7 @@ def get_queryset(self) -> BaseQuerySet[M]: @contextmanager def register_trigger( self, condition: ModelManagerTriggerCondition, action: ModelManagerTriggerAction - ) -> Generator[None, None, None]: + ) -> Generator[None]: """Register a callback for when an operation is executed inside the context. 
There is no guarantee whether the action will be called before or after the diff --git a/src/sentry/grouping/component.py b/src/sentry/grouping/component.py index 153a6ac2c4219..b64a4057d4325 100644 --- a/src/sentry/grouping/component.py +++ b/src/sentry/grouping/component.py @@ -154,7 +154,7 @@ def shallow_copy(self) -> GroupingComponent: rv.values = list(self.values) return rv - def iter_values(self) -> Generator[str | GroupingComponent, None, None]: + def iter_values(self) -> Generator[str | GroupingComponent]: """Recursively walks the component and flattens it into a list of values. """ diff --git a/src/sentry/grouping/fingerprinting/__init__.py b/src/sentry/grouping/fingerprinting/__init__.py index a991856c0f120..36ebefdfa2e51 100644 --- a/src/sentry/grouping/fingerprinting/__init__.py +++ b/src/sentry/grouping/fingerprinting/__init__.py @@ -223,7 +223,7 @@ def __init__( self.changelog = changelog self.bases = bases or [] - def iter_rules(self, include_builtin: bool = True) -> Generator[Rule, None, None]: + def iter_rules(self, include_builtin: bool = True) -> Generator[Rule]: if self.rules: yield from self.rules if include_builtin: diff --git a/src/sentry/grouping/strategies/newstyle.py b/src/sentry/grouping/strategies/newstyle.py index 3e5b5d798bbc8..5e15d0e5e7985 100644 --- a/src/sentry/grouping/strategies/newstyle.py +++ b/src/sentry/grouping/strategies/newstyle.py @@ -782,7 +782,7 @@ def get_child_exceptions(exception: SingleException) -> list[SingleException]: # For examples, see https://github.com/getsentry/rfcs/blob/main/text/0079-exception-groups.md#sentry-issue-grouping def get_top_level_exceptions( exception: SingleException, - ) -> Generator[SingleException, None, None]: + ) -> Generator[SingleException]: if exception.mechanism.is_exception_group: children = get_child_exceptions(exception) yield from itertools.chain.from_iterable( @@ -793,7 +793,7 @@ def get_top_level_exceptions( # This recursive generator gets the "first-path" of exceptions, and 
is used below. # The first path follows from the root to a leaf node, but only following the first child of each node. - def get_first_path(exception: SingleException) -> Generator[SingleException, None, None]: + def get_first_path(exception: SingleException) -> Generator[SingleException]: yield exception children = get_child_exceptions(exception) if children: diff --git a/src/sentry/hybridcloud/apigateway/proxy.py b/src/sentry/hybridcloud/apigateway/proxy.py index 95d96ae485127..a6ddeae1e5a41 100644 --- a/src/sentry/hybridcloud/apigateway/proxy.py +++ b/src/sentry/hybridcloud/apigateway/proxy.py @@ -55,7 +55,7 @@ def _parse_response(response: ExternalResponse, remote_url: str) -> StreamingHtt Convert the Responses class from requests into the drf Response """ - def stream_response() -> Generator[bytes, None, None]: + def stream_response() -> Generator[bytes]: yield from response.iter_content(PROXY_CHUNK_SIZE) streamed_response = StreamingHttpResponse( diff --git a/src/sentry/hybridcloud/rpc/__init__.py b/src/sentry/hybridcloud/rpc/__init__.py index a417251e9a93b..d644e115ef78c 100644 --- a/src/sentry/hybridcloud/rpc/__init__.py +++ b/src/sentry/hybridcloud/rpc/__init__.py @@ -130,7 +130,7 @@ def __init__(self, mapping: Mapping[SiloMode, Callable[[], ServiceInterface]]): @contextlib.contextmanager def with_replacement( self, service: ServiceInterface | None, silo_mode: SiloMode - ) -> Generator[None, None, None]: + ) -> Generator[None]: with self._lock: prev = self._singleton.get(silo_mode, None) self._singleton[silo_mode] = service diff --git a/src/sentry/hybridcloud/rpc/service.py b/src/sentry/hybridcloud/rpc/service.py index 9ab60c74bfe16..3a17638407d23 100644 --- a/src/sentry/hybridcloud/rpc/service.py +++ b/src/sentry/hybridcloud/rpc/service.py @@ -582,7 +582,7 @@ def _send_to_remote_silo(self, use_test_client: bool) -> Any: self._raise_from_response_status_error(response) @contextmanager - def _open_request_context(self) -> Generator[None, None, None]: + def 
_open_request_context(self) -> Generator[None]: timer = metrics.timer("hybrid_cloud.dispatch_rpc.duration", tags=self._metrics_tags()) span = sentry_sdk.start_span( op="hybrid_cloud.dispatch_rpc", diff --git a/src/sentry/integrations/repository/issue_alert.py b/src/sentry/integrations/repository/issue_alert.py index 0d505423bf652..0160bbf7328e9 100644 --- a/src/sentry/integrations/repository/issue_alert.py +++ b/src/sentry/integrations/repository/issue_alert.py @@ -158,7 +158,7 @@ def create_notification_message( def get_all_parent_notification_messages_by_filters( self, group_ids: list[int] | None = None, project_ids: list[int] | None = None - ) -> Generator[IssueAlertNotificationMessage, None, None]: + ) -> Generator[IssueAlertNotificationMessage]: """ If no filters are passed, then all parent notification objects are returned. diff --git a/src/sentry/integrations/slack/actions/notification.py b/src/sentry/integrations/slack/actions/notification.py index 1e04b4197331c..d2de266cad25d 100644 --- a/src/sentry/integrations/slack/actions/notification.py +++ b/src/sentry/integrations/slack/actions/notification.py @@ -71,7 +71,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: def after( self, event: GroupEvent, notification_uuid: str | None = None - ) -> Generator[CallbackFuture, None, None]: + ) -> Generator[CallbackFuture]: channel = self.get_option("channel_id") tags = set(self.get_tags_list()) diff --git a/src/sentry/models/outbox.py b/src/sentry/models/outbox.py index ec63973369f3b..b4caa6ccb9458 100644 --- a/src/sentry/models/outbox.py +++ b/src/sentry/models/outbox.py @@ -541,9 +541,7 @@ def save(self, **kwds: Any) -> None: # type: ignore[override] super().save(**kwds) @contextlib.contextmanager - def process_shard( - self, latest_shard_row: OutboxBase | None - ) -> Generator[OutboxBase | None, None, None]: + def process_shard(self, latest_shard_row: OutboxBase | None) -> Generator[OutboxBase | None]: flush_all: bool = not bool(latest_shard_row) 
next_shard_row: OutboxBase | None using: str = db.router.db_for_write(type(self)) @@ -567,7 +565,7 @@ def process_shard( def process_coalesced( self, is_synchronous_flush: bool, - ) -> Generator[OutboxBase | None, None, None]: + ) -> Generator[OutboxBase | None]: coalesced: OutboxBase | None = self.select_coalesced_messages().last() first_coalesced: OutboxBase | None = self.select_coalesced_messages().first() or coalesced tags: dict[str, int | str] = {"category": "None", "synchronous": int(is_synchronous_flush)} @@ -849,7 +847,7 @@ class OutboxContext(threading.local): @contextlib.contextmanager def outbox_context( inner: Atomic | None = None, flush: bool | None = None -) -> Generator[Atomic | None, None, None]: +) -> Generator[Atomic | None]: # If we don't specify our flush, use the outer specified override if flush is None: flush = _outbox_context.flushing_enabled diff --git a/src/sentry/newsletter/dummy.py b/src/sentry/newsletter/dummy.py index 8f155735a7e37..e940d75a6ffc9 100644 --- a/src/sentry/newsletter/dummy.py +++ b/src/sentry/newsletter/dummy.py @@ -77,7 +77,7 @@ def __init__(self, enabled: bool = False) -> None: self._enabled = enabled @contextlib.contextmanager - def enable(self) -> Generator[None, None, None]: + def enable(self) -> Generator[None]: self._enabled = True try: yield diff --git a/src/sentry/plugins/base/manager.py b/src/sentry/plugins/base/manager.py index ad5840c85c4d2..5c832d5afb39e 100644 --- a/src/sentry/plugins/base/manager.py +++ b/src/sentry/plugins/base/manager.py @@ -22,18 +22,18 @@ def __len__(self) -> int: return sum(1 for i in self.all()) @overload - def all(self) -> Generator[Plugin, None, None]: + def all(self) -> Generator[Plugin]: ... @overload - def all(self, *, version: Literal[2]) -> Generator[Plugin2, None, None]: + def all(self, *, version: Literal[2]) -> Generator[Plugin2]: ... 
@overload - def all(self, *, version: None) -> Generator[Plugin | Plugin2, None, None]: + def all(self, *, version: None) -> Generator[Plugin | Plugin2]: ... - def all(self, version: int | None = 1) -> Generator[Plugin | Plugin2, None, None]: + def all(self, version: int | None = 1) -> Generator[Plugin | Plugin2]: for plugin in sorted(super().all(), key=lambda x: x.get_title()): if not plugin.is_enabled(): continue @@ -41,7 +41,7 @@ def all(self, version: int | None = 1) -> Generator[Plugin | Plugin2, None, None continue yield plugin - def plugin_that_can_be_configured(self) -> Generator[Plugin | Plugin2, None, None]: + def plugin_that_can_be_configured(self) -> Generator[Plugin | Plugin2]: for plugin in self.all(version=None): if plugin.has_project_conf(): yield plugin diff --git a/src/sentry/replays/post_process.py b/src/sentry/replays/post_process.py index e2fa0522a4fc6..432da4daa73df 100644 --- a/src/sentry/replays/post_process.py +++ b/src/sentry/replays/post_process.py @@ -85,7 +85,7 @@ def process_raw_response( def generate_restricted_fieldset( fields: list[str], - response: Generator[ReplayDetailsResponse, None, None], + response: Generator[ReplayDetailsResponse], ) -> Iterator[ReplayDetailsResponse]: """Return only the fields requested by the client.""" if fields: @@ -101,9 +101,7 @@ def _strip_dashes(field: str) -> str: return field -def generate_normalized_output( - response: list[dict[str, Any]] -) -> Generator[ReplayDetailsResponse, None, None]: +def generate_normalized_output(response: list[dict[str, Any]]) -> Generator[ReplayDetailsResponse]: """For each payload in the response strip "agg_" prefixes.""" for item in response: ret_item: ReplayDetailsResponse = {} diff --git a/src/sentry/replays/query.py b/src/sentry/replays/query.py index 43990ada0a076..ee3a7f781da0b 100644 --- a/src/sentry/replays/query.py +++ b/src/sentry/replays/query.py @@ -828,7 +828,7 @@ def select_from_fields(fields: list[str], user_id: int | None) -> list[Column | return 
selection -def _extract_children(expression: ParenExpression) -> Generator[SearchFilter, None, None]: +def _extract_children(expression: ParenExpression) -> Generator[SearchFilter]: for child in expression.children: if isinstance(child, SearchFilter): yield child diff --git a/src/sentry/replays/usecases/ingest/dom_index.py b/src/sentry/replays/usecases/ingest/dom_index.py index 28ab091ae380a..93c0da0c7793a 100644 --- a/src/sentry/replays/usecases/ingest/dom_index.py +++ b/src/sentry/replays/usecases/ingest/dom_index.py @@ -323,7 +323,7 @@ def _project_has_option_enabled() -> bool: return all([_project_has_feature_enabled(), _project_has_option_enabled()]) -def _iter_custom_events(events: list[dict[str, Any]]) -> Generator[dict[str, Any], None, None]: +def _iter_custom_events(events: list[dict[str, Any]]) -> Generator[dict[str, Any]]: for event in events: if event.get("type") == 5: yield event diff --git a/src/sentry/replays/usecases/replay_counts.py b/src/sentry/replays/usecases/replay_counts.py index 0cbfecabe5219..667a7b82bb2f6 100644 --- a/src/sentry/replays/usecases/replay_counts.py +++ b/src/sentry/replays/usecases/replay_counts.py @@ -181,7 +181,7 @@ def _get_select_column(query: str) -> tuple[str, Sequence[Any]]: return condition.key.name, condition.value.raw_value -def extract_columns_recursive(query: list[Any]) -> Generator[SearchFilter, None, None]: +def extract_columns_recursive(query: list[Any]) -> Generator[SearchFilter]: for condition in query: if isinstance(condition, SearchFilter): if condition.key.name in ("issue.id", "transaction", "replay_id"): diff --git a/src/sentry/rules/actions/base.py b/src/sentry/rules/actions/base.py index bf04be08e1a33..cddf9d1bd1a41 100644 --- a/src/sentry/rules/actions/base.py +++ b/src/sentry/rules/actions/base.py @@ -38,7 +38,7 @@ class EventAction(RuleBase, abc.ABC): @abc.abstractmethod def after( self, event: GroupEvent, notification_uuid: str | None = None - ) -> Generator[CallbackFuture, None, None]: + ) -> 
Generator[CallbackFuture]: """ Executed after a Rule matches. diff --git a/src/sentry/rules/actions/integrations/create_ticket/base.py b/src/sentry/rules/actions/integrations/create_ticket/base.py index 061aba97ffb02..12daf2aa12ad7 100644 --- a/src/sentry/rules/actions/integrations/create_ticket/base.py +++ b/src/sentry/rules/actions/integrations/create_ticket/base.py @@ -86,7 +86,7 @@ def generate_footer(self, rule_url: str) -> str: def after( self, event: GroupEvent, notification_uuid: str | None = None - ) -> Generator[CallbackFuture, None, None]: + ) -> Generator[CallbackFuture]: integration_id = self.get_integration_id() key = f"{self.provider}:{integration_id}" yield self.future( diff --git a/src/sentry/rules/actions/notify_event_service.py b/src/sentry/rules/actions/notify_event_service.py index c02b9e0cbb79f..8fe2eb6e40057 100644 --- a/src/sentry/rules/actions/notify_event_service.py +++ b/src/sentry/rules/actions/notify_event_service.py @@ -140,7 +140,7 @@ def transform_title(self, title: str) -> str: def after( self, event: GroupEvent, notification_uuid: str | None = None - ) -> Generator[CallbackFuture, None, None]: + ) -> Generator[CallbackFuture]: service = self.get_option("service") extra: dict[str, object] = {"event_id": event.event_id} diff --git a/src/sentry/rules/actions/sentry_apps/notify_event.py b/src/sentry/rules/actions/sentry_apps/notify_event.py index 0c844a11aae69..4fad36d35a71f 100644 --- a/src/sentry/rules/actions/sentry_apps/notify_event.py +++ b/src/sentry/rules/actions/sentry_apps/notify_event.py @@ -142,7 +142,7 @@ def self_validate(self) -> None: def after( self, event: GroupEvent, notification_uuid: str | None = None - ) -> Generator[CallbackFuture, None, None]: + ) -> Generator[CallbackFuture]: sentry_app = self._get_sentry_app(event) yield self.future( notify_sentry_app, diff --git a/src/sentry/rules/registry.py b/src/sentry/rules/registry.py index b96b78f11d868..b5eec92f09af1 100644 --- a/src/sentry/rules/registry.py +++ 
b/src/sentry/rules/registry.py @@ -14,7 +14,7 @@ def __init__(self) -> None: def __contains__(self, rule_id: str) -> bool: return rule_id in self._map - def __iter__(self) -> Generator[tuple[str, type[RuleBase]], None, None]: + def __iter__(self) -> Generator[tuple[str, type[RuleBase]]]: for rule_type, rule_list in self._rules.items(): for rule in rule_list: yield rule_type, rule diff --git a/src/sentry/runner/commands/backup.py b/src/sentry/runner/commands/backup.py index becb3c6234d85..040078a5f08d4 100644 --- a/src/sentry/runner/commands/backup.py +++ b/src/sentry/runner/commands/backup.py @@ -290,9 +290,7 @@ def print_elapsed_time(kind: str, interval_ms: int, done_event: Event, printer: @contextmanager -def write_import_findings( - findings_file: IO[str] | None, printer: Printer -) -> Generator[None, None, None]: +def write_import_findings(findings_file: IO[str] | None, printer: Printer) -> Generator[None]: """ Helper that ensures that we write findings for the `import ...` command regardless of outcome. """ @@ -319,9 +317,7 @@ def write_import_findings( @contextmanager -def write_export_findings( - findings_file: IO[str] | None, printer: Printer -) -> Generator[None, None, None]: +def write_export_findings(findings_file: IO[str] | None, printer: Printer) -> Generator[None]: """ Helper that ensures that we write findings for the `export ...` command regardless of outcome. 
""" diff --git a/src/sentry/runner/commands/devservices.py b/src/sentry/runner/commands/devservices.py index 068e20f82bceb..f7d442ef27974 100644 --- a/src/sentry/runner/commands/devservices.py +++ b/src/sentry/runner/commands/devservices.py @@ -52,7 +52,7 @@ @contextlib.contextmanager -def get_docker_client() -> Generator[docker.DockerClient, None, None]: +def get_docker_client() -> Generator[docker.DockerClient]: import docker def _client() -> ContextManager[docker.DockerClient]: diff --git a/src/sentry/runner/commands/workstations.py b/src/sentry/runner/commands/workstations.py index 7e5cbd9468f19..e71e31d61ee50 100644 --- a/src/sentry/runner/commands/workstations.py +++ b/src/sentry/runner/commands/workstations.py @@ -321,7 +321,7 @@ def _get_open_port() -> int: @contextmanager -def gcloud_manager(ctx: click.Context, project: str) -> Generator[None, None, None]: +def gcloud_manager(ctx: click.Context, project: str) -> Generator[None]: """ Handles call(s) into the `gcloud` binary gracefully. 
""" diff --git a/src/sentry/services/http.py b/src/sentry/services/http.py index 256ed616dd6ab..d4bff72be7013 100644 --- a/src/sentry/services/http.py +++ b/src/sentry/services/http.py @@ -22,7 +22,7 @@ """ -def convert_options_to_env(options: dict[str, Any]) -> Generator[tuple[str, str], None, None]: +def convert_options_to_env(options: dict[str, Any]) -> Generator[tuple[str, str]]: for k, v in options.items(): if v is None: continue diff --git a/src/sentry/silo/base.py b/src/sentry/silo/base.py index 9119a80b28e0a..dae50e9c15761 100644 --- a/src/sentry/silo/base.py +++ b/src/sentry/silo/base.py @@ -61,7 +61,7 @@ class SingleProcessSiloModeState(threading.local): @staticmethod @contextlib.contextmanager - def enter(mode: SiloMode, region: Region | None = None) -> Generator[None, None, None]: + def enter(mode: SiloMode, region: Region | None = None) -> Generator[None]: """ Prevents re-entrant cases unless the exit_single_process_silo_context is explicitly embedded, ensuring that this single process silo mode simulates @@ -72,7 +72,7 @@ def enter(mode: SiloMode, region: Region | None = None) -> Generator[None, None, @staticmethod @contextlib.contextmanager - def exit() -> Generator[None, None, None]: + def exit() -> Generator[None]: """ Used by silo endpoint decorators and other contexts to signal that a potential inter process interaction is being simulated locally for acceptance diff --git a/src/sentry/snuba/metrics/utils.py b/src/sentry/snuba/metrics/utils.py index 45d20744d0e72..441925a887e14 100644 --- a/src/sentry/snuba/metrics/utils.py +++ b/src/sentry/snuba/metrics/utils.py @@ -541,7 +541,7 @@ def get_num_intervals( def get_intervals( start: datetime, end: datetime, granularity: int, interval: int | None = None -) -> Generator[datetime, None, None]: +) -> Generator[datetime]: if interval is None: interval = granularity diff --git a/src/sentry/statistical_detectors/detector.py b/src/sentry/statistical_detectors/detector.py index 032d0fb7df7fb..f1b28935092da 
100644 --- a/src/sentry/statistical_detectors/detector.py +++ b/src/sentry/statistical_detectors/detector.py @@ -82,7 +82,7 @@ def all_payloads( cls, projects: list[Project], start: datetime, - ) -> Generator[DetectorPayload, None, None]: + ) -> Generator[DetectorPayload]: projects_per_query = options.get("statistical_detectors.query.batch_size") assert projects_per_query > 0 @@ -104,7 +104,7 @@ def query_payloads( @classmethod def detect_trends( cls, projects: list[Project], start: datetime, batch_size=100 - ) -> Generator[TrendBundle, None, None]: + ) -> Generator[TrendBundle]: unique_project_ids: set[int] = set() total_count = 0 @@ -173,7 +173,7 @@ def detect_trends( @classmethod def all_timeseries( cls, objects: list[tuple[Project, int | str]], start: datetime, function: str, chunk_size=25 - ) -> Generator[tuple[int, int | str, SnubaTSResult], None, None]: + ) -> Generator[tuple[int, int | str, SnubaTSResult]]: # Snuba allows 10,000 data points per request. 14 days * 1hr * 24hr = # 336 data points per transaction name, so we can safely get 25 transaction # timeseries. 
@@ -200,7 +200,7 @@ def detect_regressions( start: datetime, function: str, timeseries_per_batch=10, - ) -> Generator[BreakpointData, None, None]: + ) -> Generator[BreakpointData]: serializer = SnubaTSResultSerializer(None, None, None) for chunk in chunked(cls.all_timeseries(objects, start, function), timeseries_per_batch): @@ -243,9 +243,9 @@ def detect_regressions( @classmethod def limit_regressions_by_project( cls, - bundles: Generator[TrendBundle, None, None], + bundles: Generator[TrendBundle], ratelimit: int | None = None, - ) -> Generator[TrendBundle, None, None]: + ) -> Generator[TrendBundle]: if ratelimit is None: ratelimit = options.get("statistical_detectors.ratelimit.ema") @@ -288,9 +288,9 @@ def make_status_change_message( @classmethod def get_regression_groups( cls, - bundles: Generator[TrendBundle, None, None], + bundles: Generator[TrendBundle], batch_size=100, - ) -> Generator[TrendBundle, None, None]: + ) -> Generator[TrendBundle]: for trend_chunk in chunked(bundles, batch_size): active_regression_groups = { (group.project_id, group.fingerprint): group @@ -325,10 +325,10 @@ def get_regression_groups( @classmethod def redirect_resolutions( cls, - bundles: Generator[TrendBundle, None, None], + bundles: Generator[TrendBundle], timestamp: datetime, batch_size=100, - ) -> Generator[TrendBundle, None, None]: + ) -> Generator[TrendBundle]: groups_to_resolve = [] for bundle in bundles: @@ -373,10 +373,10 @@ def redirect_resolutions( @classmethod def redirect_escalations( cls, - bundles: Generator[TrendBundle, None, None], + bundles: Generator[TrendBundle], timestamp: datetime, batch_size=100, - ) -> Generator[TrendBundle, None, None]: + ) -> Generator[TrendBundle]: escalated = 0 candidates = [] @@ -459,7 +459,7 @@ def _filter_escalating_groups( cls, bundles_to_escalate: list[TrendBundle], batch_size=100, - ) -> Generator[TrendBundle, None, None]: + ) -> Generator[TrendBundle]: for bundles in chunked(bundles_to_escalate, batch_size): pairs = { 
generate_issue_group_key( @@ -493,9 +493,9 @@ def _filter_escalating_groups( @classmethod def get_regression_versions( cls, - regressions: Generator[BreakpointData, None, None], + regressions: Generator[BreakpointData], batch_size=100, - ) -> Generator[tuple[int, datetime | None, BreakpointData], None, None]: + ) -> Generator[tuple[int, datetime | None, BreakpointData]]: active_regressions = [] for regression_chunk in chunked(regressions, batch_size): @@ -565,9 +565,9 @@ def get_regression_versions( @classmethod def save_regressions_with_versions( cls, - regressions: Generator[BreakpointData, None, None], + regressions: Generator[BreakpointData], batch_size=100, - ) -> Generator[BreakpointData, None, None]: + ) -> Generator[BreakpointData]: versioned_regressions = cls.get_regression_versions(regressions) for regression_chunk in chunked(versioned_regressions, batch_size): diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py index c2c64fd9ae708..8c50d84419c68 100644 --- a/src/sentry/tasks/statistical_detectors.py +++ b/src/sentry/tasks/statistical_detectors.py @@ -97,7 +97,7 @@ def get_performance_issue_settings(projects: list[Project]): return project_settings -def all_projects_with_flags() -> Generator[tuple[int, int], None, None]: +def all_projects_with_flags() -> Generator[tuple[int, int]]: yield from RangeQuerySetWrapper( Project.objects.filter(status=ObjectStatus.ACTIVE).values_list("id", "flags"), result_value_getter=lambda item: item[0], @@ -146,9 +146,9 @@ def compute_delay( def dispatch_performance_projects( - all_projects: Generator[tuple[int, int], None, None], + all_projects: Generator[tuple[int, int]], timestamp: datetime, -) -> Generator[tuple[int, int], None, None]: +) -> Generator[tuple[int, int]]: projects = [] count = 0 @@ -190,9 +190,9 @@ def dispatch_performance_projects( def dispatch_profiling_projects( - all_projects: Generator[tuple[int, int], None, None], + all_projects: Generator[tuple[int, 
int]], timestamp: datetime, -) -> Generator[tuple[int, int], None, None]: +) -> Generator[tuple[int, int]]: projects = [] count = 0 @@ -718,7 +718,7 @@ def query_transactions_timeseries( transactions: list[tuple[Project, int | str]], start: datetime, agg_function: str, -) -> Generator[tuple[int, int | str, SnubaTSResult], None, None]: +) -> Generator[tuple[int, int | str, SnubaTSResult]]: end = start.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1) days_to_query = options.get("statistical_detectors.query.transactions.timeseries_days") start = end - timedelta(days=days_to_query) @@ -922,7 +922,7 @@ def query_functions_timeseries( functions_list: list[tuple[Project, int | str]], start: datetime, agg_function: str, -) -> Generator[tuple[int, int | str, SnubaTSResult], None, None]: +) -> Generator[tuple[int, int | str, SnubaTSResult]]: projects = [project for project, _ in functions_list] # take the last 14 days as our window diff --git a/src/sentry/testutils/helpers/socket.py b/src/sentry/testutils/helpers/socket.py index 88cc346f58cd2..fab35d564ecb3 100644 --- a/src/sentry/testutils/helpers/socket.py +++ b/src/sentry/testutils/helpers/socket.py @@ -11,7 +11,7 @@ @contextlib.contextmanager -def override_blocklist(*ip_addresses: str) -> Generator[None, None, None]: +def override_blocklist(*ip_addresses: str) -> Generator[None]: with mock.patch.object( net_socket, "DISALLOWED_IPS", diff --git a/src/sentry/testutils/helpers/task_runner.py b/src/sentry/testutils/helpers/task_runner.py index de94af5ae2f1a..6706a5d3bd27e 100644 --- a/src/sentry/testutils/helpers/task_runner.py +++ b/src/sentry/testutils/helpers/task_runner.py @@ -13,7 +13,7 @@ @contextlib.contextmanager -def TaskRunner() -> Generator[None, None, None]: +def TaskRunner() -> Generator[None]: prev = settings.CELERY_ALWAYS_EAGER settings.CELERY_ALWAYS_EAGER = True current_app.conf.CELERY_ALWAYS_EAGER = True @@ -60,7 +60,7 @@ def _apply_async( self.queue.append((task, args, {} if kwargs is None 
else kwargs)) @contextlib.contextmanager - def _patched(self) -> Generator[Self, None, None]: + def _patched(self) -> Generator[Self]: if self._active: raise AssertionError("nested BurstTaskRunner!") @@ -72,7 +72,7 @@ def _patched(self) -> Generator[Self, None, None]: self._active = False @contextlib.contextmanager - def temporarily_enable_normal_task_processing(self) -> Generator[None, None, None]: + def temporarily_enable_normal_task_processing(self) -> Generator[None]: if not self._active: raise AssertionError("cannot disable burst when not active") diff --git a/src/sentry/testutils/silo.py b/src/sentry/testutils/silo.py index d27fadbd52be0..eaa046f6a0418 100644 --- a/src/sentry/testutils/silo.py +++ b/src/sentry/testutils/silo.py @@ -42,7 +42,7 @@ class LocalSiloModeState(threading.local): state = LocalSiloModeState() @contextlib.contextmanager - def enter(mode: SiloMode, region: Region | None = None) -> Generator[None, None, None]: + def enter(mode: SiloMode, region: Region | None = None) -> Generator[None]: assert state.mode is None, ( "Re-entrant invariant broken! Use exit_single_process_silo_context " "to explicit pass 'fake' RPC boundaries." 
@@ -59,7 +59,7 @@ def enter(mode: SiloMode, region: Region | None = None) -> Generator[None, None, state.region = old_region @contextlib.contextmanager - def exit() -> Generator[None, None, None]: + def exit() -> Generator[None]: old_mode = state.mode old_region = state.region state.mode = None @@ -196,7 +196,7 @@ def _create_overriding_test_class( silo_mode_attr = "__silo_mode_override" @contextmanager - def create_context(obj: TestCase) -> Generator[None, None, None]: + def create_context(obj: TestCase) -> Generator[None]: tagged_class, tagged_mode = getattr(obj, silo_mode_attr) if type(obj) is not tagged_class: diff --git a/src/sentry/utils/json.py b/src/sentry/utils/json.py index 12f4ec6d3064b..7eb5cf28f859a 100644 --- a/src/sentry/utils/json.py +++ b/src/sentry/utils/json.py @@ -71,7 +71,7 @@ def encode(self, o: object) -> str: chunks = self.iterencode(o, True) return "".join(chunks) - def iterencode(self, o: object, _one_shot: bool = False) -> Generator[str, None, None]: + def iterencode(self, o: object, _one_shot: bool = False) -> Generator[str]: chunks = super().iterencode(o, _one_shot) for chunk in chunks: chunk = chunk.replace("&", "\\u0026") diff --git a/src/sentry/utils/locking/lock.py b/src/sentry/utils/locking/lock.py index 579a47a016ee5..001b0b4298f5d 100644 --- a/src/sentry/utils/locking/lock.py +++ b/src/sentry/utils/locking/lock.py @@ -42,7 +42,7 @@ def acquire(self) -> ContextManager[None]: ) from error @contextmanager - def releaser() -> Generator[None, None, None]: + def releaser() -> Generator[None]: try: yield finally: diff --git a/src/sentry/utils/relocation.py b/src/sentry/utils/relocation.py index 475ed90c3f259..0c9d4d2e5ec45 100644 --- a/src/sentry/utils/relocation.py +++ b/src/sentry/utils/relocation.py @@ -505,7 +505,7 @@ def fail_relocation(relocation: Relocation, task: OrderedTask, reason: str = "") @contextmanager def retry_task_or_fail_relocation( relocation: Relocation, task: OrderedTask, attempts_left: int, reason: str = "" -) -> 
Generator[None, None, None]: +) -> Generator[None]: """ Catches all exceptions, and does one of two things: calls into `fail_relocation` if there are no retry attempts forthcoming, or simply bubbles them up (thereby triggering a celery retry) if diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 5833156aa242e..9164cd0c72494 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -92,7 +92,7 @@ UNSAFE_TAG = "_unsafe" -def _current_stack_filenames() -> Generator[str, None, None]: +def _current_stack_filenames() -> Generator[str]: f: FrameType | None = sys._getframe() while f is not None: yield f.f_code.co_filename diff --git a/src/sentry/web/frontend/csv.py b/src/sentry/web/frontend/csv.py index b90cfd49ac708..231e155bccb71 100644 --- a/src/sentry/web/frontend/csv.py +++ b/src/sentry/web/frontend/csv.py @@ -24,7 +24,7 @@ def get_row(self, item: T) -> tuple[str, ...]: raise NotImplementedError def respond(self, iterable: Iterable[T], filename: str) -> StreamingHttpResponse: - def row_iter() -> Generator[tuple[str, ...], None, None]: + def row_iter() -> Generator[tuple[str, ...]]: header = self.get_header() if header: yield header diff --git a/src/sentry/web/frontend/debug/mail.py b/src/sentry/web/frontend/debug/mail.py index a3a0d56838c24..4b8c2aba43dcb 100644 --- a/src/sentry/web/frontend/debug/mail.py +++ b/src/sentry/web/frontend/debug/mail.py @@ -146,7 +146,7 @@ def make_group_metadata(random: Random) -> dict[str, Any]: } -def make_group_generator(random: Random, project: Project) -> Generator[Group, None, None]: +def make_group_generator(random: Random, project: Project) -> Generator[Group]: epoch = int(datetime(2016, 6, 1, 0, 0, 0, tzinfo=timezone.utc).timestamp()) for id in itertools.count(1): first_seen = epoch + random.randint(0, 60 * 60 * 24 * 30) diff --git a/tests/sentry/utils/test_assets.py b/tests/sentry/utils/test_assets.py index ec9682fdf9ba3..7fb133ee69cd3 100644 --- a/tests/sentry/utils/test_assets.py +++ 
b/tests/sentry/utils/test_assets.py @@ -11,7 +11,7 @@ @pytest.fixture(autouse=True) -def reset_cache() -> Generator[None, None, None]: +def reset_cache() -> Generator[None]: # https://github.com/python/mypy/issues/5107 assets._frontend_versions.cache_clear() # type: ignore[attr-defined] yield @@ -19,7 +19,7 @@ def reset_cache() -> Generator[None, None, None]: @pytest.fixture -def self_hosted(tmp_path: pathlib.Path) -> Generator[None, None, None]: +def self_hosted(tmp_path: pathlib.Path) -> Generator[None]: with mock.patch.object(settings, "STATIC_FRONTEND_APP_URL", "/_static/dist/"): conf_dir = tmp_path.joinpath("conf") conf_dir.mkdir() @@ -28,7 +28,7 @@ def self_hosted(tmp_path: pathlib.Path) -> Generator[None, None, None]: @pytest.fixture -def getsentry_no_configmap(tmp_path: pathlib.Path) -> Generator[None, None, None]: +def getsentry_no_configmap(tmp_path: pathlib.Path) -> Generator[None]: # shouldn't actually happen -- but make sure it still works! with mock.patch.object( settings, "STATIC_FRONTEND_APP_URL", "https://static.example.com/_static/dist/" @@ -40,7 +40,7 @@ def getsentry_no_configmap(tmp_path: pathlib.Path) -> Generator[None, None, None @pytest.fixture -def getsentry(tmp_path: pathlib.Path) -> Generator[None, None, None]: +def getsentry(tmp_path: pathlib.Path) -> Generator[None]: with mock.patch.object( settings, "STATIC_FRONTEND_APP_URL", "https://static.example.com/_static/dist/" ): diff --git a/tools/flake8_plugin.py b/tools/flake8_plugin.py index 9da870d6c5585..f29f012913e83 100644 --- a/tools/flake8_plugin.py +++ b/tools/flake8_plugin.py @@ -152,7 +152,7 @@ def __init__(self, tree: ast.AST, filename: str) -> None: self.tree = tree self.filename = filename - def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: + def run(self) -> Generator[tuple[int, int, str, type[Any]]]: visitor = SentryVisitor(self.filename) visitor.visit(self.tree) From 1a59af245121a0e44796c880eb2612e88f72bf34 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma 
Date: Wed, 31 Jul 2024 10:49:36 -0700 Subject: [PATCH 12/52] chore(issue-priority): Switch feature flags for default alerts (#75299) https://github.com/getsentry/sentry/pull/75297 adds `organizations:priority-ga-features`, which gates the GA feature set that is intended to be released to all customers. Switching the flag from `organizations:seer-based-priority` to `organizations:priority-ga-features` to release these features as intended. --- static/app/views/alerts/rules/issue/index.tsx | 2 +- static/app/views/projectInstall/issueAlertOptions.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/views/alerts/rules/issue/index.tsx b/static/app/views/alerts/rules/issue/index.tsx index 4b7cad6104770..9f9126e9c3516 100644 --- a/static/app/views/alerts/rules/issue/index.tsx +++ b/static/app/views/alerts/rules/issue/index.tsx @@ -323,7 +323,7 @@ class IssueRuleEditor extends DeprecatedAsyncView { // now that we've loaded all the possible conditions, we can populate the // value of conditions for a new alert const hasSeerBasedPriority = - this.props.organization.features.includes('seer-based-priority'); + this.props.organization.features.includes('priority-ga-features'); const hasHighPriorityIssueAlerts = this.props.organization.features.includes('default-high-priority-alerts') || this.props.project.features.includes('high-priority-alerts'); diff --git a/static/app/views/projectInstall/issueAlertOptions.tsx b/static/app/views/projectInstall/issueAlertOptions.tsx index fd012042254a3..f264f5cc096fa 100644 --- a/static/app/views/projectInstall/issueAlertOptions.tsx +++ b/static/app/views/projectInstall/issueAlertOptions.tsx @@ -193,7 +193,7 @@ class IssueAlertOptions extends DeprecatedAsyncComponent { } shouldUseNewDefaultSetting(): boolean { - if (this.props.organization.features.includes('seer-based-priority')) { + if (this.props.organization.features.includes('priority-ga-features')) { return true; } From 
261267ba5fb1fc51658afb37a72c3dc38bf83e15 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:07:16 -0700 Subject: [PATCH 13/52] fix(issue-details): Swap order of last/first event buttons in event navigation (#75354) this pr swaps the order of the last/first event buttons in the updated event navigation. now "first" event will come first so the buttons are chronological --- static/app/views/issueDetails/eventNavigation.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/views/issueDetails/eventNavigation.tsx b/static/app/views/issueDetails/eventNavigation.tsx index 5f0dff3c46a15..7c7c2725ea7fa 100644 --- a/static/app/views/issueDetails/eventNavigation.tsx +++ b/static/app/views/issueDetails/eventNavigation.tsx @@ -44,8 +44,8 @@ enum EventNavOptions { const EventNavLabels = { [EventNavOptions.RECOMMENDED]: t('Recommended Event'), - [EventNavOptions.LATEST]: t('Last Event'), [EventNavOptions.OLDEST]: t('First Event'), + [EventNavOptions.LATEST]: t('Last Event'), }; const eventDataSections: SectionDefinition[] = [ From c87589045d0f5cfa21d7d15e6edb14c93475c3f1 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:30:37 -0400 Subject: [PATCH 14/52] ref: upgrade pyupgrade (#75356) --- requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 11cfa8c477703..8af6ea681a16a 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -157,7 +157,7 @@ python-rapidjson==1.8 python-u2flib-server==5.0.0 python-utils==3.3.3 python3-saml==1.15.0 -pyupgrade==3.15.0 +pyupgrade==3.17.0 pyuwsgi==2.0.23.post0 pyvat==1.3.15 pyyaml==6.0.1 diff --git a/requirements-dev.txt b/requirements-dev.txt index f2561e3a671f1..17e550622ea69 100644 --- a/requirements-dev.txt +++ 
b/requirements-dev.txt @@ -26,7 +26,7 @@ black>=22.10.0 flake8>=7 flake8-bugbear>=22.10 flake8-logging>=1.5 -pyupgrade>=3.15 +pyupgrade>=3.17 isort>=5.10.1 # For tools/. To be moved into redistributable dev environments. From aa854beca283f452456f1e13a83d01476fe42276 Mon Sep 17 00:00:00 2001 From: Abdullah Khan <60121741+Abdkhan14@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:41:48 -0400 Subject: [PATCH 15/52] feat(new-trace): Adding transaction quota exceeded banner (#75301) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Note that the billing checkout links will only work in prod: Screenshot 2024-07-30 at 6 01 17 PM Logic for rendering banner: - For an errors only trace - If transactions were dropped around the time a trace occurred and - If the org still has transactions quota maxed out. Trace for testing (must remove feature flag check highlighted in the pr comments below): [link](https://kreios.dev.getsentry.net:7999/performance/trace/142e6c398dc44969ae673e9671f9f58d/?eventId=0c834757e75540b5ae09fc1c63374c6b&groupId=5625634392&pageEnd=2024-07-31T09%3A04%3A13.838&pageStart=2024-07-30T09%3A04%3A13.838&query=is%3Aunresolved+issue.priority%3A%5Bhigh%2C+medium%5D&referrer=issue-stream&source=issue_details&stream_index=2&timestamp=1722373453) --------- Co-authored-by: Abdullah Khan Co-authored-by: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> --- .../performanceOnboarding/sidebar.tsx | 9 +- static/app/components/sidebar/index.spec.tsx | 8 +- .../app/utils/analytics/tracingEventMap.tsx | 17 ++ .../performance/newTraceDetails/index.tsx | 4 +- .../newTraceDetails/traceAnalytics.tsx | 33 +++ .../traceTypeWarnings/errorsOnlyWarnings.tsx | 269 ++++++++++++++++++ .../traceTypeWarnings/index.tsx | 33 +++ .../traceTypeWarnings/styles.tsx | 165 +++++++++++ .../useTransactionUsageStats.tsx | 67 +++++ .../traceWarnings/performanceSetupWarning.tsx | 251 ---------------- 10 files changed, 596 insertions(+), 260
deletions(-) create mode 100644 static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx create mode 100644 static/app/views/performance/newTraceDetails/traceTypeWarnings/index.tsx create mode 100644 static/app/views/performance/newTraceDetails/traceTypeWarnings/styles.tsx create mode 100644 static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx delete mode 100644 static/app/views/performance/newTraceDetails/traceWarnings/performanceSetupWarning.tsx diff --git a/static/app/components/performanceOnboarding/sidebar.tsx b/static/app/components/performanceOnboarding/sidebar.tsx index 824a9c02d96f4..fd8cad4ea16ab 100644 --- a/static/app/components/performanceOnboarding/sidebar.tsx +++ b/static/app/components/performanceOnboarding/sidebar.tsx @@ -1,6 +1,5 @@ import {Fragment, useEffect, useMemo, useState} from 'react'; import styled from '@emotion/styled'; -import qs from 'qs'; import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right.svg'; @@ -25,6 +24,7 @@ import {space} from 'sentry/styles/space'; import type {Project} from 'sentry/types/project'; import EventWaiter from 'sentry/utils/eventWaiter'; import useApi from 'sentry/utils/useApi'; +import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import usePrevious from 'sentry/utils/usePrevious'; import useProjects from 'sentry/utils/useProjects'; @@ -48,7 +48,7 @@ function PerformanceOnboardingSidebar(props: CommonSidebarProps) { const isActive = currentPanel === SidebarPanelKey.PERFORMANCE_ONBOARDING; const organization = useOrganization(); const hasProjectAccess = organization.access.includes('project:read'); - + const location = useLocation<{project: string[] | null}>(); const {projects, initiallyLoaded: projectsLoaded} = useProjects(); const [currentProject, setCurrentProject] = useState(undefined); @@ -59,10 +59,9 @@ function PerformanceOnboardingSidebar(props: 
CommonSidebarProps) { filterProjects(projects); const priorityProjectIds: Set | null = useMemo(() => { - const queryParams = qs.parse(location.search); - const decodedProjectIds = decodeProjectIds(queryParams.project); + const decodedProjectIds = decodeProjectIds(location.query.project); return decodedProjectIds === null ? null : new Set(decodedProjectIds); - }, []); + }, [location.query.project]); useEffect(() => { if ( diff --git a/static/app/components/sidebar/index.spec.tsx b/static/app/components/sidebar/index.spec.tsx index eee4238ca4fe8..ae569ec8efae6 100644 --- a/static/app/components/sidebar/index.spec.tsx +++ b/static/app/components/sidebar/index.spec.tsx @@ -65,7 +65,7 @@ describe('Sidebar', function () { }; beforeEach(function () { - mockUseLocation.mockReset(); + mockUseLocation.mockReturnValue(LocationFixture()); jest.spyOn(incidentsHook, 'useServiceIncidents').mockImplementation( () => ({ @@ -87,6 +87,10 @@ describe('Sidebar', function () { }); }); + afterEach(function () { + mockUseLocation.mockReset(); + }); + it('renders', async function () { renderSidebar({organization}); expect(await screen.findByTestId('sidebar-dropdown')).toBeInTheDocument(); @@ -301,7 +305,7 @@ describe('Sidebar', function () { ConfigStore.set('features', new Set([])); ConfigStore.set('user', user); - mockUseLocation.mockReturnValue(LocationFixture()); + mockUseLocation.mockReturnValue({...LocationFixture()}); }); it('renders navigation', async function () { diff --git a/static/app/utils/analytics/tracingEventMap.tsx b/static/app/utils/analytics/tracingEventMap.tsx index 4013224be4e44..f79e62ee24f8d 100644 --- a/static/app/utils/analytics/tracingEventMap.tsx +++ b/static/app/utils/analytics/tracingEventMap.tsx @@ -6,8 +6,18 @@ export type TracingEventParameters = { shape: string; trace_duration_seconds: number; }; + 'trace.quality.performance_setup.banner_loaded': {}; 'trace.quality.performance_setup.checklist_triggered': {}; 
'trace.quality.performance_setup.learn_more_clicked': {}; + 'trace.quality.quota_exceeded.banner_loaded': { + traceType: string; + }; + 'trace.quality.quota_exceeded.increase_budget_clicked': { + traceType: string; + }; + 'trace.quality.quota_exceeded.learn_more_clicked': { + traceType: string; + }; 'trace.trace_layout.change': { layout: string; }; @@ -88,6 +98,13 @@ export const tracingEventMap: Record = { 'Triggered Performance Setup Checklist', 'trace.quality.performance_setup.learn_more_clicked': 'Clicked Learn More in Performance Setup Banner', + 'trace.quality.performance_setup.banner_loaded': 'Performance Setup Banner Loaded', + 'trace.quality.quota_exceeded.increase_budget_clicked': + 'Clicked Increase Budget in Quota Exceeded Banner', + 'trace.quality.quota_exceeded.learn_more_clicked': + 'Clicked Learn More in Quota Exceeded Banner', + 'trace.quality.quota_exceeded.banner_loaded': + 'Clicked Learn More in Performance Setup Banner', 'trace.trace_layout.view_shortcuts': 'Viewed Trace Shortcuts', 'trace.trace_warning_type': 'Viewed Trace Warning Type', 'trace.trace_layout.zoom_to_fill': 'Trace Zoom to Fill', diff --git a/static/app/views/performance/newTraceDetails/index.tsx b/static/app/views/performance/newTraceDetails/index.tsx index 972aff76bd695..03429ac0e26e3 100644 --- a/static/app/views/performance/newTraceDetails/index.tsx +++ b/static/app/views/performance/newTraceDetails/index.tsx @@ -85,12 +85,12 @@ import { DEFAULT_TRACE_VIEW_PREFERENCES, loadTraceViewPreferences, } from './traceState/tracePreferences'; -import {PerformanceSetupWarning} from './traceWarnings/performanceSetupWarning'; import {isTraceNode} from './guards'; import {Trace} from './trace'; import {TraceMetadataHeader} from './traceMetadataHeader'; import type {TraceReducer, TraceReducerState} from './traceState'; import {TraceType} from './traceType'; +import TraceTypeWarnings from './traceTypeWarnings'; import {useTraceQueryParamStateSync} from './useTraceQueryParamStateSync'; 
function decodeScrollQueue(maybePath: unknown): TraceTree.NodePath[] | null { @@ -957,7 +957,7 @@ export function TraceViewWaterfall(props: TraceViewWaterfallProps) { return ( - organization, }); +const trackPerformanceSetupBannerLoaded = (organization: Organization) => + trackAnalytics('trace.quality.performance_setup.banner_loaded', { + organization, + }); + +const trackQuotaExceededIncreaseBudgetClicked = ( + organization: Organization, + traceType: string +) => + trackAnalytics('trace.quality.quota_exceeded.increase_budget_clicked', { + organization, + traceType, + }); + +const trackQuotaExceededLearnMoreClicked = ( + organization: Organization, + traceType: string +) => + trackAnalytics('trace.quality.quota_exceeded.learn_more_clicked', { + organization, + traceType, + }); + +const trackQuotaExceededBannerLoaded = (organization: Organization, traceType: string) => + trackAnalytics('trace.quality.quota_exceeded.banner_loaded', { + organization, + traceType, + }); + const trackPerformanceSetupLearnMoreClicked = (organization: Organization) => trackAnalytics('trace.quality.performance_setup.learn_more_clicked', { organization, @@ -125,6 +154,10 @@ const traceAnalytics = { // Trace Quality Improvement trackPerformanceSetupChecklistTriggered, trackPerformanceSetupLearnMoreClicked, + trackPerformanceSetupBannerLoaded, + trackQuotaExceededIncreaseBudgetClicked, + trackQuotaExceededLearnMoreClicked, + trackQuotaExceededBannerLoaded, }; export {traceAnalytics}; diff --git a/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx b/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx new file mode 100644 index 0000000000000..9f39eedc86b0b --- /dev/null +++ b/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx @@ -0,0 +1,269 @@ +import {useEffect, useMemo} from 'react'; +import {browserHistory} from 'react-router'; +import styled from '@emotion/styled'; + +import 
connectDotsImg from 'sentry-images/spot/performance-connect-dots.svg'; +import waitingForSpansImg from 'sentry-images/spot/performance-waiting-for-span.svg'; + +import {Alert} from 'sentry/components/alert'; +import ExternalLink from 'sentry/components/links/externalLink'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import {withPerformanceOnboarding} from 'sentry/data/platformCategories'; +import {t, tct} from 'sentry/locale'; +import SidebarPanelStore from 'sentry/stores/sidebarPanelStore'; +import type {Organization} from 'sentry/types/organization'; +import type {Project} from 'sentry/types/project'; +import {useApiQuery} from 'sentry/utils/queryClient'; +import {useLocation} from 'sentry/utils/useLocation'; +import useProjects from 'sentry/utils/useProjects'; + +import {traceAnalytics} from '../traceAnalytics'; +import type {TraceTree} from '../traceModels/traceTree'; +import {TraceType} from '../traceType'; + +import {TraceWarningComponents} from './styles'; +import {useTransactionUsageStats} from './useTransactionUsageStats'; + +type ErrorOnlyWarningsProps = { + organization: Organization; + traceSlug: string | undefined; + tree: TraceTree; +}; + +function filterProjects(projects: Project[], tree: TraceTree) { + const projectsWithNoPerformance: Project[] = []; + const projectsWithOnboardingChecklist: Project[] = []; + + for (const project of projects) { + if (tree.project_ids.has(Number(project.id))) { + if (!project.firstTransactionEvent) { + projectsWithNoPerformance.push(project); + if (project.platform && withPerformanceOnboarding.has(project.platform)) { + projectsWithOnboardingChecklist.push(project); + } + } + } + } + + return {projectsWithNoPerformance, projectsWithOnboardingChecklist}; +} + +type PerformanceSetupBannerProps = { + projectsWithNoPerformance: Project[]; + projectsWithOnboardingChecklist: Project[]; +} & ErrorOnlyWarningsProps; + +function PerformanceSetupBanner({ + traceSlug, + organization, + 
projectsWithNoPerformance, + projectsWithOnboardingChecklist, +}: PerformanceSetupBannerProps) { + const location = useLocation(); + const LOCAL_STORAGE_KEY = `${traceSlug}:performance-orphan-error-onboarding-banner-hide`; + const hideBanner = projectsWithNoPerformance.length === 0; + + useEffect(() => { + if (hideBanner) { + return; + } + + traceAnalytics.trackPerformanceSetupBannerLoaded(organization); + + if (location.hash === '#performance-sidequest') { + SidebarPanelStore.activatePanel(SidebarPanelKey.PERFORMANCE_ONBOARDING); + } + }, [projectsWithOnboardingChecklist, hideBanner, organization, location.hash]); + + if (hideBanner) { + return null; + } + + if (projectsWithOnboardingChecklist.length === 0) { + return ( + + {tct( + "Some of the projects associated with this trace don't support performance monitoring. To learn more about how to setup performance monitoring, visit our [documentation].", + { + documentationLink: ( + + {t('documentation')} + + ), + } + )} + + ); + } + + return ( + { + traceAnalytics.trackPerformanceSetupChecklistTriggered(organization); + browserHistory.replace({ + pathname: location.pathname, + query: { + ...location.query, + project: projectsWithOnboardingChecklist.map(project => project.id), + }, + hash: '#performance-sidequest', + }); + SidebarPanelStore.activatePanel(SidebarPanelKey.PERFORMANCE_ONBOARDING); + }} + onSecondaryButtonClick={() => + traceAnalytics.trackPerformanceSetupLearnMoreClicked(organization) + } + localStorageKey={LOCAL_STORAGE_KEY} + docsRoute="https://docs.sentry.io/product/performance/" + organization={organization} + primaryButtonText={t('Start Checklist')} + /> + ); +} + +type Subscription = { + categories: { + transactions: { + usageExceeded: boolean; + }; + }; + planDetails: { + billingInterval: 'monthly' | 'annual'; + hasOnDemandModes: boolean; + }; +}; + +function PerformanceQuotaExceededWarning(props: ErrorOnlyWarningsProps) { + const {data: transactionUsageStats} = useTransactionUsageStats({ + 
organization: props.organization, + tree: props.tree, + }); + + const {data: subscription} = useApiQuery( + [`/subscriptions/${props.organization.slug}/`], + { + staleTime: Infinity, + } + ); + + // Check if events were dropped due to exceeding the transaction quota, around when the trace occurred. + const droppedTransactionsCount = transactionUsageStats?.totals['sum(quantity)'] || 0; + + // Check if the organization still has transaction quota maxed out. + const hasExceededTransactionLimit = + subscription?.categories.transactions.usageExceeded || false; + + const hideBanner = + droppedTransactionsCount === 0 || + !props.organization.features.includes('trace-view-quota-exceeded-banner') || + !hasExceededTransactionLimit; + + useEffect(() => { + if (hideBanner) { + return; + } + + traceAnalytics.trackQuotaExceededBannerLoaded(props.organization, props.tree.shape); + }, [hideBanner, props.organization, props.tree.shape]); + + if (hideBanner) { + return null; + } + + const title = tct("You've exceeded your [billingInterval] [billingType]", { + billingInterval: subscription?.planDetails.billingInterval ?? 'monthly', + billingType: subscription?.planDetails.hasOnDemandModes + ? t('pay-as-you-go budget') + : t('quota'), + }); + + const ctaText = subscription?.planDetails?.hasOnDemandModes + ? 
t('Increase Budget') + : t('Increase Volumes'); + + return ( + + { + traceAnalytics.trackQuotaExceededLearnMoreClicked( + props.organization, + props.tree.shape + ); + }} + onPrimaryButtonClick={() => { + traceAnalytics.trackQuotaExceededIncreaseBudgetClicked( + props.organization, + props.tree.shape + ); + browserHistory.push({ + pathname: `/settings/billing/checkout/`, + query: { + skipBundles: true, + }, + }); + }} + docsRoute="https://docs.sentry.io/pricing/quotas/" + primaryButtonText={ctaText} + /> + + ); +} + +const Wrapper = styled('div')` + ${TraceWarningComponents.BannerBackground} { + top: 4px; + right: 40px; + height: 98%; + width: 100%; + max-width: 270px; + } +`; + +export function ErrorsOnlyWarnings({ + traceSlug, + tree, + organization, +}: ErrorOnlyWarningsProps) { + const {projects} = useProjects(); + + const {projectsWithNoPerformance, projectsWithOnboardingChecklist} = useMemo(() => { + return filterProjects(projects, tree); + }, [projects, tree]); + + if (tree.type !== 'trace' || tree.shape !== TraceType.ONLY_ERRORS) { + return null; + } + + return projectsWithNoPerformance.length > 0 ? ( + + ) : ( + + ); +} diff --git a/static/app/views/performance/newTraceDetails/traceTypeWarnings/index.tsx b/static/app/views/performance/newTraceDetails/traceTypeWarnings/index.tsx new file mode 100644 index 0000000000000..544c5fc47bfb1 --- /dev/null +++ b/static/app/views/performance/newTraceDetails/traceTypeWarnings/index.tsx @@ -0,0 +1,33 @@ +import type {Organization} from 'sentry/types/organization'; + +import type {TraceTree} from '../traceModels/traceTree'; +import {TraceType} from '../traceType'; + +import {ErrorsOnlyWarnings} from './errorsOnlyWarnings'; + +type Props = { + organization: Organization; + traceSlug: string | undefined; + tree: TraceTree; +}; + +function TraceTypeWarnings(props: Props) { + if ( + props.tree.type !== 'trace' || + props.tree.shape === TraceType.ONE_ROOT || + // Note: Just handling the errors-only-trace banners for now. 
+ props.tree.shape !== TraceType.ONLY_ERRORS + ) { + return null; + } + + return ( + + ); +} + +export default TraceTypeWarnings; diff --git a/static/app/views/performance/newTraceDetails/traceTypeWarnings/styles.tsx b/static/app/views/performance/newTraceDetails/traceTypeWarnings/styles.tsx new file mode 100644 index 0000000000000..852b0d99a6d14 --- /dev/null +++ b/static/app/views/performance/newTraceDetails/traceTypeWarnings/styles.tsx @@ -0,0 +1,165 @@ +import styled from '@emotion/styled'; + +import {Button} from 'sentry/components/button'; +import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import {IconClose} from 'sentry/icons/iconClose'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Organization} from 'sentry/types/organization'; +import useDismissAlert from 'sentry/utils/useDismissAlert'; + +type BannerProps = { + description: React.ReactNode; + docsRoute: string; + image: any; + localStorageKey: string; + onPrimaryButtonClick: () => void; + onSecondaryButtonClick: () => void; + organization: Organization; + primaryButtonText: string; + title: React.ReactNode; +}; + +function Banner(props: BannerProps) { + const {dismiss: snooze, isDismissed: isSnoozed} = useDismissAlert({ + key: props.localStorageKey, + expirationDays: 7, + }); + + const {dismiss, isDismissed} = useDismissAlert({ + key: props.localStorageKey, + expirationDays: 365, + }); + + if (isDismissed || isSnoozed) { + return null; + } + + return ( + + + {props.title} + {props.description} + + + + + + + + + + + , + }} + size="xs" + items={[ + { + key: 'dismiss', + label: t('Dismiss'), + onAction: dismiss, + }, + { + key: 'snooze', + label: t('Snooze'), + onAction: snooze, + }, + ]} + /> + + ); +} + +const BannerWrapper = styled('div')` + position: relative; + border: 1px solid ${p => p.theme.border}; + border-radius: ${p => p.theme.borderRadius}; + padding: ${space(2)} ${space(3)}; + margin-bottom: ${space(2)}; + background: linear-gradient( + 
90deg, + ${p => p.theme.backgroundSecondary}00 0%, + ${p => p.theme.backgroundSecondary}FF 70%, + ${p => p.theme.backgroundSecondary}FF 100% + ); + container-type: inline-size; +`; + +const ActionsWrapper = styled('div')` + max-width: 50%; +`; + +const ButtonsWrapper = styled('div')` + display: flex; + align-items: center; + gap: ${space(0.5)}; +`; + +const BannerTitle = styled('div')` + font-size: ${p => p.theme.fontSizeExtraLarge}; + margin-bottom: ${space(1)}; + font-weight: ${p => p.theme.fontWeightBold}; +`; + +const BannerDescription = styled('div')` + margin-bottom: ${space(1.5)}; +`; + +const CloseDropdownMenu = styled(DropdownMenu)` + position: absolute; + display: block; + top: ${space(1)}; + right: ${space(1)}; + color: ${p => p.theme.white}; + cursor: pointer; + z-index: 1; +`; + +const BannerBackground = styled('div')<{image: any}>` + display: flex; + justify-self: flex-end; + position: absolute; + top: 14px; + right: 15px; + height: 81%; + width: 100%; + max-width: 413px; + background-image: url(${p => p.image}); + background-repeat: no-repeat; + background-size: contain; + + @container (max-width: 840px) { + display: none; + } +`; + +const ActionButton = styled('div')` + display: flex; + gap: ${space(1)}; +`; + +const TraceWarningComponents = { + Banner, + BannerBackground, +}; + +export {TraceWarningComponents}; diff --git a/static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx b/static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx new file mode 100644 index 0000000000000..b5d02ba23af86 --- /dev/null +++ b/static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx @@ -0,0 +1,67 @@ +import type {Organization} from 'sentry/types/organization'; +import {useApiQuery} from 'sentry/utils/queryClient'; + +import type {TraceTree} from '../traceModels/traceTree'; + +// 1 hour in milliseconds +const ONE_HOUR = 60 * 60 * 1000; + +export type 
TransactionStatsGroup = { + by: { + reason: 'transaction_usage_exceeded'; + }; + totals: { + 'sum(quantity)': number; + }; +}; + +type PartialUsageStats = { + groups: TransactionStatsGroup[]; +}; + +export function useTransactionUsageStats({ + organization, + tree, +}: { + organization: Organization; + tree: TraceTree; +}) { + const traceNode = tree.root.children[0]; + + const traceStartDate = new Date(traceNode?.space?.[0]); + const traceEndDate = new Date(traceNode?.space?.[0] + traceNode?.space?.[1]); + + // Add 1 hour buffer to the trace start and end date. + const start = traceNode + ? new Date(traceStartDate.getTime() - ONE_HOUR).toISOString() + : ''; + const end = traceNode ? new Date(traceEndDate.getTime() + ONE_HOUR).toISOString() : ''; + + const pathname = `/organizations/${organization.slug}/stats_v2/`; + + const endpointOptions = { + query: { + start, + end, + interval: '1h', + groupBy: ['outcome', 'reason'], + field: 'sum(quantity)', + utc: true, + category: 'transaction_indexed', + project: Array.from(tree.project_ids), + referrer: 'trace-view-warnings', + }, + }; + + const results = useApiQuery([pathname, endpointOptions], { + staleTime: Infinity, + enabled: !!traceNode, + }); + + return { + ...results, + data: results.data?.groups.find( + group => group.by.reason === 'transaction_usage_exceeded' + ), + }; +} diff --git a/static/app/views/performance/newTraceDetails/traceWarnings/performanceSetupWarning.tsx b/static/app/views/performance/newTraceDetails/traceWarnings/performanceSetupWarning.tsx deleted file mode 100644 index f309995f7c7bb..0000000000000 --- a/static/app/views/performance/newTraceDetails/traceWarnings/performanceSetupWarning.tsx +++ /dev/null @@ -1,251 +0,0 @@ -import {useEffect, useMemo} from 'react'; -import {browserHistory} from 'react-router'; -import styled from '@emotion/styled'; -import qs from 'qs'; - -import connectDotsImg from 'sentry-images/spot/performance-connect-dots.svg'; - -import {Alert} from 
'sentry/components/alert'; -import {Button} from 'sentry/components/button'; -import {DropdownMenu} from 'sentry/components/dropdownMenu'; -import ExternalLink from 'sentry/components/links/externalLink'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {withPerformanceOnboarding} from 'sentry/data/platformCategories'; -import {IconClose} from 'sentry/icons'; -import {t, tct} from 'sentry/locale'; -import SidebarPanelStore from 'sentry/stores/sidebarPanelStore'; -import {space} from 'sentry/styles/space'; -import type {Organization} from 'sentry/types/organization'; -import type {Project} from 'sentry/types/project'; -import useDismissAlert from 'sentry/utils/useDismissAlert'; -import useProjects from 'sentry/utils/useProjects'; - -import {traceAnalytics} from '../traceAnalytics'; -import type {TraceTree} from '../traceModels/traceTree'; -import {TraceType} from '../traceType'; - -type OnlyOrphanErrorWarningsProps = { - organization: Organization; - traceSlug: string | undefined; - tree: TraceTree; -}; - -function filterProjects(projects: Project[], tree: TraceTree) { - const projectsWithNoPerformance: Project[] = []; - const projectsWithOnboardingChecklist: Project[] = []; - - for (const project of projects) { - if (tree.project_ids.has(Number(project.id))) { - if (!project.firstTransactionEvent) { - projectsWithNoPerformance.push(project); - if (project.platform && withPerformanceOnboarding.has(project.platform)) { - projectsWithOnboardingChecklist.push(project); - } - } - } - } - - return {projectsWithNoPerformance, projectsWithOnboardingChecklist}; -} - -export function PerformanceSetupWarning({ - traceSlug, - tree, - organization, -}: OnlyOrphanErrorWarningsProps) { - const {projects} = useProjects(); - - const {projectsWithNoPerformance, projectsWithOnboardingChecklist} = useMemo(() => { - return filterProjects(projects, tree); - }, [projects, tree]); - - const LOCAL_STORAGE_KEY = 
`${traceSlug}:performance-orphan-error-onboarding-banner-hide`; - - useEffect(() => { - if ( - projectsWithOnboardingChecklist.length > 0 && - location.hash === '#performance-sidequest' - ) { - SidebarPanelStore.activatePanel(SidebarPanelKey.PERFORMANCE_ONBOARDING); - } - }, [projectsWithOnboardingChecklist]); - - const {dismiss: snooze, isDismissed: isSnoozed} = useDismissAlert({ - key: LOCAL_STORAGE_KEY, - expirationDays: 7, - }); - - const {dismiss, isDismissed} = useDismissAlert({ - key: LOCAL_STORAGE_KEY, - expirationDays: 365, - }); - - if ( - tree.type !== 'trace' || - tree.shape !== TraceType.ONLY_ERRORS || - projectsWithNoPerformance.length === 0 - ) { - return null; - } - - if (projectsWithOnboardingChecklist.length === 0) { - return ( - - {tct( - "Some of the projects associated with this trace don't support performance monitoring. To learn more about how to setup performance monitoring, visit our [documentation].", - { - documentationLink: ( - - {t('documentation')} - - ), - } - )} - - ); - } - - if (isDismissed || isSnoozed) { - return null; - } - - return ( - - - {t('Your setup is incomplete')} - - {t( - "Want to know why this string of errors happened? Configure performance monitoring to get a full picture of what's going on." 
- )} - - - - - - - - - - - {} - , - }} - size="xs" - items={[ - { - key: 'dismiss', - label: t('Dismiss'), - onAction: () => { - dismiss(); - }, - }, - { - key: 'snooze', - label: t('Snooze'), - onAction: () => { - snooze(); - }, - }, - ]} - /> - - ); -} - -const BannerWrapper = styled('div')` - position: relative; - border: 1px solid ${p => p.theme.border}; - border-radius: ${p => p.theme.borderRadius}; - padding: ${space(2)} ${space(3)}; - margin-bottom: ${space(2)}; - background: linear-gradient( - 90deg, - ${p => p.theme.backgroundSecondary}00 0%, - ${p => p.theme.backgroundSecondary}FF 70%, - ${p => p.theme.backgroundSecondary}FF 100% - ); - container-type: inline-size; -`; - -const ActionsWrapper = styled('div')` - max-width: 50%; -`; - -const ButtonsWrapper = styled('div')` - display: flex; - align-items: center; - gap: ${space(0.5)}; -`; - -const BannerTitle = styled('div')` - font-size: ${p => p.theme.fontSizeExtraLarge}; - margin-bottom: ${space(1)}; - font-weight: ${p => p.theme.fontWeightBold}; -`; - -const BannerDescription = styled('div')` - margin-bottom: ${space(1.5)}; -`; - -const CloseDropdownMenu = styled(DropdownMenu)` - position: absolute; - display: block; - top: ${space(1)}; - right: ${space(1)}; - color: ${p => p.theme.white}; - cursor: pointer; - z-index: 1; -`; - -const Background = styled('div')<{image: any}>` - display: flex; - justify-self: flex-end; - position: absolute; - top: 14px; - right: 15px; - height: 81%; - width: 100%; - max-width: 413px; - background-image: url(${p => p.image}); - background-repeat: no-repeat; - background-size: contain; - - @container (max-width: 840px) { - display: none; - } -`; - -const ActionButton = styled('div')` - display: flex; - gap: ${space(1)}; -`; From 438743c02061786a21d730bd01529d4a53b414fe Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:44:48 -0700 Subject: [PATCH 16/52] chore(github): fix typing for webhooks and 
RepositoryProvider (#75351) --- pyproject.toml | 3 --- src/sentry/integrations/github/repository.py | 4 +++- src/sentry/integrations/github/webhook.py | 9 ++++----- src/sentry/integrations/github_enterprise/webhook.py | 8 ++++++-- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index eb3509c242fcd..22230640af713 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -264,10 +264,7 @@ module = [ "sentry.integrations.github.client", "sentry.integrations.github.integration", "sentry.integrations.github.issues", - "sentry.integrations.github.repository", - "sentry.integrations.github.webhook", "sentry.integrations.github_enterprise.integration", - "sentry.integrations.github_enterprise.webhook", "sentry.integrations.gitlab.client", "sentry.integrations.gitlab.integration", "sentry.integrations.gitlab.issues", diff --git a/src/sentry/integrations/github/repository.py b/src/sentry/integrations/github/repository.py index c12cc980671ba..5ac84d7fb079f 100644 --- a/src/sentry/integrations/github/repository.py +++ b/src/sentry/integrations/github/repository.py @@ -76,6 +76,9 @@ def eval_commits(client: Any) -> Sequence[Mapping[str, Any]]: if integration_id is None: raise NotImplementedError("GitHub apps requires an integration id to fetch commits") integration = integration_service.get_integration(integration_id=integration_id) + if integration is None: + raise NotImplementedError("GitHub apps requires a valid integration to fetch commits") + installation = integration.get_installation(organization_id=repo.organization_id) client = installation.get_client() @@ -83,7 +86,6 @@ def eval_commits(client: Any) -> Sequence[Mapping[str, Any]]: return eval_commits(client) except Exception as e: installation.raise_error(e) - return [] def _format_commits( self, diff --git a/src/sentry/integrations/github/webhook.py b/src/sentry/integrations/github/webhook.py index 38046f81f316f..269f6591163f6 100644 --- 
a/src/sentry/integrations/github/webhook.py +++ b/src/sentry/integrations/github/webhook.py @@ -10,7 +10,7 @@ import orjson from dateutil.parser import parse as parse_date from django.db import IntegrityError, router, transaction -from django.http import HttpResponse +from django.http import HttpRequest, HttpResponse from django.utils.crypto import constant_time_compare from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt @@ -592,15 +592,14 @@ class GitHubIntegrationsWebhookEndpoint(Endpoint): "POST": ApiPublishStatus.PRIVATE, } - _handlers = { + _handlers: dict[str, Callable[[], Callable[[Any], Any]]] = { "push": PushEventWebhook, "pull_request": PullRequestEventWebhook, "installation": InstallationEventWebhook, } def get_handler(self, event_type: str) -> Callable[[], Callable[[Any], Any]] | None: - handler: Callable[[], Callable[[Any], Any]] | None = self._handlers.get(event_type) - return handler + return self._handlers.get(event_type) def is_valid_signature(self, method: str, body: bytes, secret: str, signature: str) -> bool: if method == "sha1": @@ -612,7 +611,7 @@ def is_valid_signature(self, method: str, body: bytes, secret: str, signature: s return constant_time_compare(expected, signature) @method_decorator(csrf_exempt) - def dispatch(self, request: Request, *args: Any, **kwargs: Any) -> HttpResponse: + def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: if request.method != "POST": return HttpResponse(status=405) diff --git a/src/sentry/integrations/github_enterprise/webhook.py b/src/sentry/integrations/github_enterprise/webhook.py index 940aae34891f8..1be7f70e12460 100644 --- a/src/sentry/integrations/github_enterprise/webhook.py +++ b/src/sentry/integrations/github_enterprise/webhook.py @@ -4,6 +4,8 @@ import hmac import logging import re +from collections.abc import Callable +from typing import Any import orjson import sentry_sdk @@ -124,6 +126,8 @@ class 
GitHubEnterpriseWebhookBase(Endpoint): authentication_classes = () permission_classes = () + _handlers: dict[str, Callable[[], Callable[[Any], Any]]] = {} + # https://developer.github.com/webhooks/ def get_handler(self, event_type): return self._handlers.get(event_type) @@ -144,7 +148,7 @@ def is_valid_signature(self, method, body, secret, signature): return constant_time_compare(expected, signature) @method_decorator(csrf_exempt) - def dispatch(self, request: Request, *args, **kwargs) -> HttpResponse: + def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: if request.method != "POST": return HttpResponse(status=405) @@ -303,7 +307,7 @@ class GitHubEnterpriseWebhookEndpoint(GitHubEnterpriseWebhookBase): } @method_decorator(csrf_exempt) - def dispatch(self, request: Request, *args, **kwargs) -> HttpResponse: + def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: if request.method != "POST": return HttpResponse(status=405) From 9ca135c3b0b51275dd2deae58f995d844612ce34 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:05:39 -0400 Subject: [PATCH 17/52] ref: remove OrganizationAbsoluteUrlMixin (#75360) blocker for upgrading to django-stubs 5.0.4 - handled deprecated names in getsentry: https://github.com/getsentry/getsentry/pull/14786 - introduced new names in sentry: https://github.com/getsentry/sentry/pull/75340 --- src/sentry/api/utils.py | 4 -- src/sentry/models/organization.py | 26 ++++++++++++- .../services/organization/model.py | 27 +++++++++++++- src/sentry/types/organization.py | 37 ------------------- 4 files changed, 49 insertions(+), 45 deletions(-) delete mode 100644 src/sentry/types/organization.py diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index 294b84bf509cf..6c86c5639a5dd 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -29,10 +29,6 @@ from sentry.models.apitoken import is_api_token_auth from 
sentry.models.organization import Organization from sentry.models.orgauthtoken import is_org_auth_token_auth -from sentry.organizations.absolute_url import ( # noqa: F401 # XXX: for compatibility, remove after getsentry is updated - customer_domain_path, - generate_organization_url, -) from sentry.organizations.services.organization import ( RpcOrganization, RpcOrganizationMember, diff --git a/src/sentry/models/organization.py b/src/sentry/models/organization.py index 1de05bcd776b3..0269768de09e9 100644 --- a/src/sentry/models/organization.py +++ b/src/sentry/models/organization.py @@ -32,8 +32,8 @@ from sentry.locks import locks from sentry.models.outbox import OutboxCategory from sentry.notifications.services import notifications_service +from sentry.organizations.absolute_url import has_customer_domain, organization_absolute_url from sentry.roles.manager import Role -from sentry.types.organization import OrganizationAbsoluteUrlMixin from sentry.users.services.user import RpcUser, RpcUserProfile from sentry.users.services.user.service import user_service from sentry.utils.http import is_using_customer_domain @@ -145,7 +145,7 @@ def get_organizations_where_user_is_owner(self, user_id: int) -> QuerySet: @snowflake_id_model @region_silo_model -class Organization(ReplicatedRegionModel, OrganizationAbsoluteUrlMixin): +class Organization(ReplicatedRegionModel): """ An organization represents a group of individuals which maintain ownership of projects. """ @@ -467,6 +467,28 @@ def get_url(slug: str) -> str: except NoReverseMatch: return reverse(Organization.get_url_viewname()) + @cached_property + def __has_customer_domain(self) -> bool: + """ + Check if the current organization is using or has access to customer domains. + """ + return has_customer_domain() + + def absolute_url(self, path: str, query: str | None = None, fragment: str | None = None) -> str: + """ + Get an absolute URL to `path` for this organization. 
+ + This method takes customer-domains into account and will update the path when + customer-domains are active. + """ + return organization_absolute_url( + has_customer_domain=self.__has_customer_domain, + slug=self.slug, + path=path, + query=query, + fragment=fragment, + ) + def get_scopes(self, role: Role) -> frozenset[str]: """ Note that scopes for team-roles are filtered through this method too. diff --git a/src/sentry/organizations/services/organization/model.py b/src/sentry/organizations/services/organization/model.py index 537ca783b6024..5445f9cd3e329 100644 --- a/src/sentry/organizations/services/organization/model.py +++ b/src/sentry/organizations/services/organization/model.py @@ -5,6 +5,7 @@ from collections.abc import Callable, Iterable, Mapping, Sequence from datetime import datetime from enum import IntEnum +from functools import cached_property from typing import Any from django.dispatch import Signal @@ -14,12 +15,12 @@ from sentry import roles from sentry.hybridcloud.rpc import RpcModel +from sentry.organizations.absolute_url import has_customer_domain, organization_absolute_url from sentry.projects.services.project import RpcProject from sentry.roles import team_roles from sentry.roles.manager import TeamRole from sentry.signals import sso_enabled from sentry.silo.base import SiloMode -from sentry.types.organization import OrganizationAbsoluteUrlMixin from sentry.users.services.user.model import RpcUser @@ -204,7 +205,7 @@ class RpcOrganizationInvite(RpcModel): email: str = "" -class RpcOrganizationSummary(RpcModel, OrganizationAbsoluteUrlMixin): +class RpcOrganizationSummary(RpcModel): """ The subset of organization metadata available from the control silo specifically. 
""" @@ -241,6 +242,28 @@ def delete_option(self, key: str) -> None: organization_service.delete_option(organization_id=self.id, key=key) + @cached_property + def __has_customer_domain(self) -> bool: + """ + Check if the current organization is using or has access to customer domains. + """ + return has_customer_domain() + + def absolute_url(self, path: str, query: str | None = None, fragment: str | None = None) -> str: + """ + Get an absolute URL to `path` for this organization. + + This method takes customer-domains into account and will update the path when + customer-domains are active. + """ + return organization_absolute_url( + has_customer_domain=self.__has_customer_domain, + slug=self.slug, + path=path, + query=query, + fragment=fragment, + ) + class RpcOrganization(RpcOrganizationSummary): # Represents the full set of teams and projects associated with the org. Note that these are not filtered by diff --git a/src/sentry/types/organization.py b/src/sentry/types/organization.py deleted file mode 100644 index 9535b4c64b184..0000000000000 --- a/src/sentry/types/organization.py +++ /dev/null @@ -1,37 +0,0 @@ -from __future__ import annotations - -from functools import cached_property - -from django.db import models - -from sentry.organizations.absolute_url import has_customer_domain, organization_absolute_url - - -class OrganizationAbsoluteUrlMixin: - slug: str | models.Field[str, str] - - @cached_property - def __has_customer_domain(self) -> bool: - """ - Check if the current organization is using or has access to customer domains. - """ - return has_customer_domain() - - def _has_customer_domain(self) -> bool: - # For getsentry compatibility - return self.__has_customer_domain - - def absolute_url(self, path: str, query: str | None = None, fragment: str | None = None) -> str: - """ - Get an absolute URL to `path` for this organization. - - This method takes customer-domains into account and will update the path when - customer-domains are active. 
- """ - return organization_absolute_url( - has_customer_domain=self.__has_customer_domain, - slug=self.slug, - path=path, - query=query, - fragment=fragment, - ) From 0d8acb91962cced0f4287d1391d1c1d7b5cca5cb Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Wed, 31 Jul 2024 12:11:32 -0700 Subject: [PATCH 18/52] chore(hybrid-cloud): Resubmits Pydantic v2.7 upgrade (#75311) --- pyproject.toml | 3 +++ requirements-base.txt | 2 +- requirements-dev-frozen.txt | 4 +++- requirements-frozen.txt | 4 +++- src/sentry/autofix/utils.py | 2 +- src/sentry/hybridcloud/rpc/__init__.py | 10 ++++++---- src/sentry/hybridcloud/rpc/sig.py | 6 +++++- src/sentry/types/region.py | 5 +++-- src/sentry/users/services/user/serial.py | 2 +- .../api/endpoints/test_organization_sdk_updates.py | 9 ++++++++- ...t_organization_sentry_app_installation_details.py | 12 +++++++++++- tests/sentry/test_dependencies.py | 8 -------- 12 files changed, 45 insertions(+), 22 deletions(-) delete mode 100644 tests/sentry/test_dependencies.py diff --git a/pyproject.toml b/pyproject.toml index 22230640af713..7ad5a5a9a2250 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,9 @@ filterwarnings = [ # pytest has not yet implemented the replacement for this yet "ignore:The --looponfail command line argument.*", + + # Temporarily disable deprecation warnings for pydantic while we upgrade it + "ignore::DeprecationWarning:pydantic.*", ] looponfailroots = ["src", "tests"] diff --git a/requirements-base.txt b/requirements-base.txt index b23a105c3acde..1fa5e26a651c4 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -45,7 +45,7 @@ python-rapidjson>=1.4 psutil>=5.9.2 psycopg2-binary>=2.9.9 PyJWT>=2.4.0 -pydantic>=1.10.17,<2 +pydantic>=2.5.0 python-dateutil>=2.9.0 pymemcache python-u2flib-server>=5.0.0 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 8af6ea681a16a..548b32530c707 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -7,6 +7,7 @@ 
--index-url https://pypi.devinfra.sentry.io/simple amqp==5.2.0 +annotated-types==0.7.0 anyio==3.7.1 asgiref==3.7.2 attrs==23.1.0 @@ -137,7 +138,8 @@ pyasn1-modules==0.2.4 pycodestyle==2.11.0 pycountry==17.5.14 pycparser==2.21 -pydantic==1.10.17 +pydantic==2.7.4 +pydantic-core==2.18.4 pyflakes==3.2.0 pyjwt==2.4.0 pymemcache==4.0.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 2b1ca6ad44806..668c1d5e2fae1 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -7,6 +7,7 @@ --index-url https://pypi.devinfra.sentry.io/simple amqp==5.2.0 +annotated-types==0.7.0 anyio==3.7.1 asgiref==3.7.2 attrs==23.1.0 @@ -96,7 +97,8 @@ pyasn1==0.4.5 pyasn1-modules==0.2.4 pycountry==17.5.14 pycparser==2.21 -pydantic==1.10.17 +pydantic==2.7.4 +pydantic-core==2.18.4 pyjwt==2.4.0 pymemcache==4.0.0 pyparsing==3.0.9 diff --git a/src/sentry/autofix/utils.py b/src/sentry/autofix/utils.py index ede2a34cb2cb8..d100ddb7ece07 100644 --- a/src/sentry/autofix/utils.py +++ b/src/sentry/autofix/utils.py @@ -1,11 +1,11 @@ import datetime import enum -from typing import TypedDict import orjson import requests from django.conf import settings from pydantic import BaseModel +from typing_extensions import TypedDict from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs from sentry.models.project import Project diff --git a/src/sentry/hybridcloud/rpc/__init__.py b/src/sentry/hybridcloud/rpc/__init__.py index d644e115ef78c..cc6d2b2fbcd23 100644 --- a/src/sentry/hybridcloud/rpc/__init__.py +++ b/src/sentry/hybridcloud/rpc/__init__.py @@ -11,6 +11,7 @@ import pydantic from django.db import router, transaction from django.db.models import Model +from pydantic import ConfigDict from sentry.silo.base import SiloMode from sentry.utils.env import in_test_environment @@ -43,13 +44,14 @@ def __hash__(self) -> int: class RpcModel(pydantic.BaseModel): """A serializable object that may be part of an RPC schema.""" - class Config: - orm_mode = True - 
use_enum_values = True + # TODO(Hybrid-Cloud): Remove number coercion after pydantic V2 stabilized + model_config = ConfigDict( + from_attributes=True, use_enum_values=True, coerce_numbers_to_str=True + ) @classmethod def get_field_names(cls) -> Iterable[str]: - return iter(cls.__fields__.keys()) + return iter(cls.model_fields.keys()) @classmethod def serialize_by_field_name( diff --git a/src/sentry/hybridcloud/rpc/sig.py b/src/sentry/hybridcloud/rpc/sig.py index 7782d5693cd6f..0331be3f85d55 100644 --- a/src/sentry/hybridcloud/rpc/sig.py +++ b/src/sentry/hybridcloud/rpc/sig.py @@ -7,6 +7,7 @@ import pydantic from django.utils.functional import LazyObject +from pydantic import ConfigDict from sentry.hybridcloud.rpc import ArgumentDict @@ -81,7 +82,10 @@ def create_field(param: inspect.Parameter) -> tuple[Any, Any]: if self.is_instance_method: parameters = parameters[1:] # exclude `self` argument field_definitions = {p.name: create_field(p) for p in parameters} - return pydantic.create_model(model_name, **field_definitions) # type: ignore[call-overload] + + # TODO(Hybrid-Cloud): Remove number coercion after pydantic V2 stabilized + config = ConfigDict(coerce_numbers_to_str=True) + return pydantic.create_model(model_name, __config__=config, **field_definitions) # type: ignore[call-overload] _RETURN_MODEL_ATTR = "value" diff --git a/src/sentry/types/region.py b/src/sentry/types/region.py index 50a8614d1875a..ac0cabc1241ef 100644 --- a/src/sentry/types/region.py +++ b/src/sentry/types/region.py @@ -8,8 +8,8 @@ import sentry_sdk from django.conf import settings from django.http import HttpRequest +from pydantic import TypeAdapter from pydantic.dataclasses import dataclass -from pydantic.tools import parse_obj_as from sentry import options from sentry.silo.base import SiloMode, SingleProcessSiloModeState, control_silo_function @@ -151,7 +151,8 @@ def validate_all(self) -> None: def _parse_raw_config(region_config: Any) -> Iterable[Region]: if isinstance(region_config, 
(str, bytes)): json_config_values = json.loads(region_config) - config_values = parse_obj_as(list[Region], json_config_values) + adapter = TypeAdapter(list[Region]) + config_values = adapter.validate_python(json_config_values) else: config_values = region_config diff --git a/src/sentry/users/services/user/serial.py b/src/sentry/users/services/user/serial.py index 30987ad398cdc..eef96de5b4824 100644 --- a/src/sentry/users/services/user/serial.py +++ b/src/sentry/users/services/user/serial.py @@ -36,7 +36,7 @@ def serialize_generic_user(user: Any) -> RpcUser | None: def _serialize_from_user_fields(user: User) -> dict[str, Any]: args = { field_name: getattr(user, field_name) - for field_name in RpcUserProfile.__fields__ + for field_name in RpcUserProfile.model_fields if hasattr(user, field_name) } args["pk"] = user.pk diff --git a/tests/sentry/api/endpoints/test_organization_sdk_updates.py b/tests/sentry/api/endpoints/test_organization_sdk_updates.py index 5625a2213546d..1d64fda6b5af4 100644 --- a/tests/sentry/api/endpoints/test_organization_sdk_updates.py +++ b/tests/sentry/api/endpoints/test_organization_sdk_updates.py @@ -2,6 +2,7 @@ import pytest from django.urls import reverse +from pydantic import PydanticDeprecatedSince20 from sentry.sdk_updates import SdkIndexState from sentry.testutils.cases import APITestCase, SnubaTestCase @@ -188,8 +189,14 @@ def test_unknown_version(self, mock_index_state): update_suggestions = response.data assert len(update_suggestions) == 0 + # TODO(Gabe): Temporary kludge to allow this to pass while pydantic + # deprecation warnings are active. 
+ filtered_warnings = [ + info for info in warninfo if not isinstance(info.message, PydanticDeprecatedSince20) + ] + # until it is turned into an error, we'll get a warning about parsing an invalid version - (warning,) = warninfo + (warning,) = filtered_warnings assert isinstance(warning.message, DeprecationWarning) (warn_msg,) = warning.message.args assert ( diff --git a/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py b/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py index 2f690b4d40305..81884462f4471 100644 --- a/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py +++ b/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py @@ -1,3 +1,4 @@ +import unittest from unittest.mock import patch import responses @@ -111,7 +112,16 @@ def test_delete_install(self, record, run): assert AuditLogEntry.objects.filter( event=audit_log.get_event_id("SENTRY_APP_UNINSTALL") ).exists() - run.assert_called_once_with(install=self.orm_installation2, user=rpc_user, action="deleted") + + # user is wrapped in a SimpleLazyObject, meaning we can't assert equality + # via the method parameter check. Manually retrieve and check it instead. 
+ run.assert_called_once_with( + install=self.orm_installation2, user=unittest.mock.ANY, action="deleted" + ) + + mock_call_user = run.mock_calls[0].kwargs.get("user") + assert mock_call_user == rpc_user + record.assert_called_with( "sentry_app.uninstalled", user_id=self.user.id, diff --git a/tests/sentry/test_dependencies.py b/tests/sentry/test_dependencies.py deleted file mode 100644 index eebd8e5fe696c..0000000000000 --- a/tests/sentry/test_dependencies.py +++ /dev/null @@ -1,8 +0,0 @@ -import pydantic - - -def test_pydantic_1x_compiled() -> None: - if not pydantic.VERSION.startswith("1."): - raise AssertionError("delete this test, it only applies to pydantic 1.x") - # pydantic is horribly slow when not cythonized - assert pydantic.__file__.endswith(".so") From b51c4e2c44fe5c51c41c567d18a9dfc9c9ea4ef4 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:31:27 -0400 Subject: [PATCH 19/52] ref: fix type errors pointed out by django-stubs 5.0.4 (#75359) ``` src/sentry/integrations/services/repository/impl.py:55: error: Incompatible type for lookup 'status': (got "ObjectStatus", expected "str | int") [misc] src/sentry/sentry_apps/services/app/impl.py:193: error: Incompatible type for lookup 'api_token_id': (got "str", expected "ApiToken | int | None") [misc] ``` --- src/sentry/integrations/services/repository/impl.py | 2 +- src/sentry/sentry_apps/services/app/model.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/integrations/services/repository/impl.py b/src/sentry/integrations/services/repository/impl.py index 73e6c64818d4f..f23563ab05dc6 100644 --- a/src/sentry/integrations/services/repository/impl.py +++ b/src/sentry/integrations/services/repository/impl.py @@ -38,7 +38,7 @@ def get_repositories( providers: list[str] | None = None, has_integration: bool | None = None, has_provider: bool | None = None, - status: ObjectStatus | None = None, + status: int | 
None = None, ) -> list[RpcRepository]: query = Repository.objects.filter(organization_id=organization_id) if integration_id is not None: diff --git a/src/sentry/sentry_apps/services/app/model.py b/src/sentry/sentry_apps/services/app/model.py index 0f5cfe0ee4d0d..09c885dd3547f 100644 --- a/src/sentry/sentry_apps/services/app/model.py +++ b/src/sentry/sentry_apps/services/app/model.py @@ -153,5 +153,5 @@ class SentryAppInstallationFilterArgs(TypedDict, total=False): organization_id: int uuids: list[str] status: int - api_token_id: str + api_token_id: int api_installation_token_id: str From d2d039ae0ebd92ff3b5328670468c6174640e7c8 Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 31 Jul 2024 15:54:07 -0400 Subject: [PATCH 20/52] docs(discover): API docs for discover endpoint (#75325) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ![Screenshot 2024-07-31 at 10 02 14 AM](https://github.com/user-attachments/assets/a7a8dfee-fccd-465e-800c-a8731f8669ce) ![Screenshot 2024-07-31 at 10 02 05 AM](https://github.com/user-attachments/assets/940fd084-5265-4a21-b6a1-7e3ec8412974) ![Screenshot 2024-07-31 at 10 00 28 AM](https://github.com/user-attachments/assets/da4165a4-e560-4007-b5be-caa53dd196e1) ![Screenshot 2024-07-31 at 10 00 19 AM](https://github.com/user-attachments/assets/fc9722c8-c904-4aeb-928a-af3abe8969ef) ![Screenshot 2024-07-31 at 10 00 11 AM](https://github.com/user-attachments/assets/7eb19abe-0931-4e5d-8701-5d2959a822ac) --------- Co-authored-by: Liza Mock --- .../serializers/models/discoversavedquery.py | 46 ++++- .../examples/discover_saved_query_examples.py | 168 ++++++++++++++++++ src/sentry/apidocs/parameters.py | 37 ++++ .../endpoints/discover_homepage_query.py | 6 +- .../endpoints/discover_saved_queries.py | 55 +++++- .../endpoints/discover_saved_query_detail.py | 58 +++++- src/sentry/discover/endpoints/serializers.py | 98 ++++++++-- 7 files changed, 436 insertions(+), 32 deletions(-) create mode 100644 
src/sentry/apidocs/examples/discover_saved_query_examples.py diff --git a/src/sentry/api/serializers/models/discoversavedquery.py b/src/sentry/api/serializers/models/discoversavedquery.py index fd6f9cad4cf36..5ce8cfa4867f5 100644 --- a/src/sentry/api/serializers/models/discoversavedquery.py +++ b/src/sentry/api/serializers/models/discoversavedquery.py @@ -1,7 +1,8 @@ from collections import defaultdict -from typing import DefaultDict +from typing import DefaultDict, TypedDict from sentry.api.serializers import Serializer, register +from sentry.api.serializers.models.user import UserSerializerResponse from sentry.constants import ALL_ACCESS_PROJECTS from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes from sentry.users.services.user.service import user_service @@ -10,8 +11,39 @@ DATASET_SOURCES = dict(DatasetSourcesTypes.as_choices()) +class DiscoverSavedQueryResponseOptional(TypedDict, total=False): + environment: list[str] + query: str + fields: list[str] + widths: list[str] + conditions: list[str] + aggregations: list[str] + range: str + start: str + end: str + orderby: str + limit: str + yAxis: list[str] + display: str + topEvents: int + interval: str + + +class DiscoverSavedQueryResponse(DiscoverSavedQueryResponseOptional): + id: str + name: str + projects: list[int] + version: int + queryDataset: str + datasetSource: str + expired: bool + dateCreated: str + dateUpdated: str + createdBy: UserSerializerResponse + + @register(DiscoverSavedQuery) -class DiscoverSavedQuerySerializer(Serializer): +class DiscoverSavedQueryModelSerializer(Serializer): def get_attrs(self, item_list, user, **kwargs): result: DefaultDict[str, dict] = defaultdict(lambda: {"created_by": {}}) @@ -34,7 +66,7 @@ def get_attrs(self, item_list, user, **kwargs): return result - def serialize(self, obj, attrs, user, **kwargs): + def serialize(self, obj, attrs, user, **kwargs) -> DiscoverSavedQueryResponse: query_keys = [ "environment", "query", @@ 
-52,7 +84,7 @@ def serialize(self, obj, attrs, user, **kwargs): "topEvents", "interval", ] - data = { + data: DiscoverSavedQueryResponse = { "id": str(obj.id), "name": obj.name, "projects": [project.id for project in obj.projects.all()], @@ -67,15 +99,17 @@ def serialize(self, obj, attrs, user, **kwargs): for key in query_keys: if obj.query.get(key) is not None: - data[key] = obj.query[key] + data[key] = obj.query[key] # type: ignore[literal-required] # expire queries that are beyond the retention period if "start" in obj.query: start, end = parse_timestamp(obj.query["start"]), parse_timestamp(obj.query["end"]) if start and end: - data["expired"], data["start"] = outside_retention_with_modified_start( + expired, modified_start = outside_retention_with_modified_start( start, end, obj.organization ) + data["expired"] = expired + data["start"] = modified_start.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if obj.query.get("all_projects"): data["projects"] = list(ALL_ACCESS_PROJECTS) diff --git a/src/sentry/apidocs/examples/discover_saved_query_examples.py b/src/sentry/apidocs/examples/discover_saved_query_examples.py new file mode 100644 index 0000000000000..c26a300aa34d8 --- /dev/null +++ b/src/sentry/apidocs/examples/discover_saved_query_examples.py @@ -0,0 +1,168 @@ +from drf_spectacular.utils import OpenApiExample + +DISCOVER_SAVED_QUERY_OBJ = { + "id": "1", + "name": "Transactions by Volume", + "projects": [], + "version": 2, + "queryDataset": "discover", + "datasetSource": "unknown", + "expired": False, + "dateCreated": "2024-07-25T19:35:38.422859Z", + "dateUpdated": "2024-07-25T19:35:38.422874Z", + "environment": [], + "query": "transaction:/api/foo", + "fields": ["transaction", "project", "count()", "avg(transaction.duration)", "p75()", "p95()"], + "widths": [], + "range": "24h", + "orderby": "-count", + "yAxis": ["count()"], + "createdBy": { + "id": "1", + "name": "Admin", + "username": "admin", + "email": "admin@sentry.io", + "avatarUrl": "www.example.com", + 
"isActive": True, + "hasPasswordAuth": True, + "isManaged": False, + "dateJoined": "2021-10-25T17:07:33.190596Z", + "lastLogin": "2024-07-16T15:28:39.261659Z", + "has2fa": True, + "lastActive": "2024-07-16T20:45:49.364197Z", + "isSuperuser": False, + "isStaff": False, + "experiments": {}, + "emails": [{"id": "1", "email": "admin@sentry.io", "is_verified": True}], + "avatar": { + "avatarType": "letter_avatar", + "avatarUuid": None, + "avatarUrl": "www.example.com", + }, + }, +} + +SAVED_QUERIES = [ + { + "id": "1", + "name": "Transactions by Volume", + "projects": [], + "version": 2, + "queryDataset": "transaction-like", + "datasetSource": "unknown", + "expired": False, + "dateCreated": "2024-07-25T19:35:38.422859Z", + "dateUpdated": "2024-07-25T19:35:38.422874Z", + "environment": [], + "query": "", + "fields": [ + "id", + "transaction", + "timestamp", + ], + "widths": [], + "range": "24h", + "orderby": "-timestamp", + "yAxis": ["count()"], + "createdBy": { + "id": "1", + "name": "Admin", + "username": "admin", + "email": "admin@sentry.io", + "avatarUrl": "www.example.com", + "isActive": True, + "hasPasswordAuth": True, + "isManaged": False, + "dateJoined": "2021-10-25T17:07:33.190596Z", + "lastLogin": "2024-07-16T15:28:39.261659Z", + "has2fa": True, + "lastActive": "2024-07-16T20:45:49.364197Z", + "isSuperuser": False, + "isStaff": False, + "experiments": {}, + "emails": [{"id": "1", "email": "admin@sentry.io", "is_verified": True}], + "avatar": { + "avatarType": "letter_avatar", + "avatarUuid": None, + "avatarUrl": "www.example.com", + }, + }, + }, + { + "id": "2", + "name": "All Events", + "projects": [], + "version": 2, + "queryDataset": "discover", + "datasetSource": "unknown", + "expired": False, + "dateCreated": "2024-07-25T19:35:38.422859Z", + "dateUpdated": "2024-07-25T19:35:38.422874Z", + "environment": [], + "query": "transaction:/api/foo", + "fields": [ + "transaction", + "project", + "count()", + "avg(transaction.duration)", + "p75()", + "p95()", + ], + 
"widths": [], + "range": "24h", + "orderby": "-count", + "yAxis": ["count()"], + "createdBy": { + "id": "1", + "name": "Admin", + "username": "admin", + "email": "admin@sentry.io", + "avatarUrl": "www.example.com", + "isActive": True, + "hasPasswordAuth": True, + "isManaged": False, + "dateJoined": "2021-10-25T17:07:33.190596Z", + "lastLogin": "2024-07-16T15:28:39.261659Z", + "has2fa": True, + "lastActive": "2024-07-16T20:45:49.364197Z", + "isSuperuser": False, + "isStaff": False, + "experiments": {}, + "emails": [{"id": "1", "email": "admin@sentry.io", "is_verified": True}], + "avatar": { + "avatarType": "letter_avatar", + "avatarUuid": None, + "avatarUrl": "www.example.com", + }, + }, + }, +] + + +class DiscoverExamples: + DISCOVER_SAVED_QUERY_GET_RESPONSE = [ + OpenApiExample( + "Discover Saved Query GET response", + value=DISCOVER_SAVED_QUERY_OBJ, + status_codes=["200"], + response_only=True, + ) + ] + + DISCOVER_SAVED_QUERY_POST_RESPONSE = [ + OpenApiExample( + "Create Discover Saved Query", + value=DISCOVER_SAVED_QUERY_OBJ, + status_codes=["201"], + response_only=True, + ) + ] + + DISCOVER_SAVED_QUERIES_QUERY_RESPONSE = [ + OpenApiExample( + "Get Discover Saved Queries", + value=SAVED_QUERIES, + status_codes=["200"], + response_only=True, + ) + ] diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py index c0accacae4a8f..b23f86f216709 100644 --- a/src/sentry/apidocs/parameters.py +++ b/src/sentry/apidocs/parameters.py @@ -612,3 +612,40 @@ class DashboardParams: type=int, description="""The ID of the dashboard you'd like to retrieve.""", ) + + +class DiscoverSavedQueryParams: + DISCOVER_SAVED_QUERY_ID = OpenApiParameter( + name="query_id", + location="path", + required=True, + type=int, + description="""The ID of the Discover query you'd like to retrieve.""", + ) + + +class DiscoverSavedQueriesParams: + QUERY = OpenApiParameter( + name="query", + location="query", + required=False, + type=str, + description="""The name of the Discover 
query you'd like to filter by.""", + ) + + SORT = OpenApiParameter( + name="sortBy", + location="query", + required=False, + type=str, + description="""The property to sort results by. If not specified, the results are sorted by query name. + +Available fields are: +- name +- dateCreated +- dateUpdated +- mostPopular +- recentlyViewed +- myqueries + """, + ) diff --git a/src/sentry/discover/endpoints/discover_homepage_query.py b/src/sentry/discover/endpoints/discover_homepage_query.py index 7e6d379f9a13b..44abebde7c399 100644 --- a/src/sentry/discover/endpoints/discover_homepage_query.py +++ b/src/sentry/discover/endpoints/discover_homepage_query.py @@ -25,9 +25,9 @@ def get_homepage_query(organization, user): @region_silo_endpoint class DiscoverHomepageQueryEndpoint(OrganizationEndpoint): publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.PRIVATE, + "PUT": ApiPublishStatus.PRIVATE, } owner = ApiOwner.PERFORMANCE diff --git a/src/sentry/discover/endpoints/discover_saved_queries.py b/src/sentry/discover/endpoints/discover_saved_queries.py index 96aebd4c5ea98..3f28dea8a7d16 100644 --- a/src/sentry/discover/endpoints/discover_saved_queries.py +++ b/src/sentry/discover/endpoints/discover_saved_queries.py @@ -1,6 +1,7 @@ from __future__ import annotations from django.db.models import Case, IntegerField, When +from drf_spectacular.utils import extend_schema from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response @@ -12,17 +13,31 @@ from sentry.api.bases import NoProjects, OrganizationEndpoint from sentry.api.paginator import GenericOffsetPaginator from sentry.api.serializers import serialize +from sentry.api.serializers.models.discoversavedquery import ( + DiscoverSavedQueryModelSerializer, + DiscoverSavedQueryResponse, +) +from sentry.apidocs.constants 
import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND +from sentry.apidocs.examples.discover_saved_query_examples import DiscoverExamples +from sentry.apidocs.parameters import ( + CursorQueryParam, + DiscoverSavedQueriesParams, + GlobalParams, + VisibilityParams, +) +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.discover.endpoints.bases import DiscoverSavedQueryPermission from sentry.discover.endpoints.serializers import DiscoverSavedQuerySerializer from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes from sentry.search.utils import tokenize_query +@extend_schema(tags=["Discover"]) @region_silo_endpoint class DiscoverSavedQueriesEndpoint(OrganizationEndpoint): publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - "POST": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PUBLIC, + "POST": ApiPublishStatus.PUBLIC, } owner = ApiOwner.PERFORMANCE permission_classes = (DiscoverSavedQueryPermission,) @@ -32,9 +47,29 @@ def has_feature(self, organization, request): "organizations:discover", organization, actor=request.user ) or features.has("organizations:discover-query", organization, actor=request.user) + @extend_schema( + operation_id="List an Organization's Discover Saved Queries", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + VisibilityParams.PER_PAGE, + CursorQueryParam, + DiscoverSavedQueriesParams.QUERY, + DiscoverSavedQueriesParams.SORT, + ], + request=None, + responses={ + 200: inline_sentry_response_serializer( + "DiscoverSavedQueryListResponse", list[DiscoverSavedQueryResponse] + ), + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=DiscoverExamples.DISCOVER_SAVED_QUERIES_QUERY_RESPONSE, + ) def get(self, request: Request, organization) -> Response: """ - List saved queries for organization + Retrieve a list of saved queries that are associated with the given organization. 
""" if not self.has_feature(organization, request): return self.respond(status=404) @@ -112,9 +147,21 @@ def data_fn(offset, limit): default_per_page=25, ) + @extend_schema( + operation_id="Create a New Saved Query", + parameters=[GlobalParams.ORG_ID_OR_SLUG], + request=DiscoverSavedQuerySerializer, + responses={ + 201: DiscoverSavedQueryModelSerializer, + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=DiscoverExamples.DISCOVER_SAVED_QUERY_POST_RESPONSE, + ) def post(self, request: Request, organization) -> Response: """ - Create a saved query + Create a new saved query for the given organization. """ if not self.has_feature(organization, request): return self.respond(status=404) diff --git a/src/sentry/discover/endpoints/discover_saved_query_detail.py b/src/sentry/discover/endpoints/discover_saved_query_detail.py index e28d7afc4e0eb..d2529fd74a1ce 100644 --- a/src/sentry/discover/endpoints/discover_saved_query_detail.py +++ b/src/sentry/discover/endpoints/discover_saved_query_detail.py @@ -1,5 +1,6 @@ from django.db.models import F, Q from django.utils import timezone +from drf_spectacular.utils import extend_schema from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response @@ -11,6 +12,15 @@ from sentry.api.bases import NoProjects, OrganizationEndpoint from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize +from sentry.api.serializers.models.discoversavedquery import DiscoverSavedQueryModelSerializer +from sentry.apidocs.constants import ( + RESPONSE_BAD_REQUEST, + RESPONSE_FORBIDDEN, + RESPONSE_NO_CONTENT, + RESPONSE_NOT_FOUND, +) +from sentry.apidocs.examples.discover_saved_query_examples import DiscoverExamples +from sentry.apidocs.parameters import DiscoverSavedQueryParams, GlobalParams from sentry.discover.endpoints.bases import DiscoverSavedQueryPermission from 
sentry.discover.endpoints.serializers import DiscoverSavedQuerySerializer from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes @@ -35,12 +45,13 @@ def convert_args(self, request: Request, organization_id_or_slug, query_id, *arg return (args, kwargs) +@extend_schema(tags=["Discover"]) @region_silo_endpoint class DiscoverSavedQueryDetailEndpoint(DiscoverSavedQueryBase): publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PUBLIC, + "GET": ApiPublishStatus.PUBLIC, + "PUT": ApiPublishStatus.PUBLIC, } def has_feature(self, organization, request): @@ -48,9 +59,23 @@ def has_feature(self, organization, request): "organizations:discover", organization, actor=request.user ) or features.has("organizations:discover-query", organization, actor=request.user) + @extend_schema( + operation_id="Retrieve an Organization's Discover Saved Query", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + DiscoverSavedQueryParams.DISCOVER_SAVED_QUERY_ID, + ], + request=None, + responses={ + 200: DiscoverSavedQueryModelSerializer, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=DiscoverExamples.DISCOVER_SAVED_QUERY_GET_RESPONSE, + ) def get(self, request: Request, organization, query) -> Response: """ - Get a saved query + Retrieve a saved query. 
""" if not self.has_feature(organization, request): return self.respond(status=404) @@ -59,9 +84,21 @@ def get(self, request: Request, organization, query) -> Response: return Response(serialize(query), status=200) + @extend_schema( + operation_id="Edit an Organization's Discover Saved Query", + parameters=[GlobalParams.ORG_ID_OR_SLUG, DiscoverSavedQueryParams.DISCOVER_SAVED_QUERY_ID], + request=DiscoverSavedQuerySerializer, + responses={ + 200: DiscoverSavedQueryModelSerializer, + 400: RESPONSE_BAD_REQUEST, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + examples=DiscoverExamples.DISCOVER_SAVED_QUERY_GET_RESPONSE, + ) def put(self, request: Request, organization, query) -> Response: """ - Modify a saved query + Modify a saved query. """ if not self.has_feature(organization, request): return self.respond(status=404) @@ -109,9 +146,18 @@ def put(self, request: Request, organization, query) -> Response: return Response(serialize(query), status=200) + @extend_schema( + operation_id="Delete an Organization's Discover Saved Query", + parameters=[GlobalParams.ORG_ID_OR_SLUG, DiscoverSavedQueryParams.DISCOVER_SAVED_QUERY_ID], + responses={ + 204: RESPONSE_NO_CONTENT, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) def delete(self, request: Request, organization, query) -> Response: """ - Delete a saved query + Delete a saved query. 
""" if not self.has_feature(organization, request): return self.respond(status=404) diff --git a/src/sentry/discover/endpoints/serializers.py b/src/sentry/discover/endpoints/serializers.py index 7597ff395f7cb..52bb22efdfe6f 100644 --- a/src/sentry/discover/endpoints/serializers.py +++ b/src/sentry/discover/endpoints/serializers.py @@ -2,6 +2,7 @@ from collections.abc import Sequence from django.db.models import Count, Max, QuerySet +from drf_spectacular.utils import extend_schema_serializer from rest_framework import serializers from rest_framework.serializers import ListField @@ -143,18 +144,56 @@ def get_condition(self, condition): return condition +@extend_schema_serializer( + exclude_fields=["rollup", "aggregations", "groupby", "conditions", "limit", "version", "widths"] +) class DiscoverSavedQuerySerializer(serializers.Serializer): - name = serializers.CharField(required=True, max_length=255) - projects = ListField(child=serializers.IntegerField(), required=False, default=[]) + name = serializers.CharField( + required=True, max_length=255, help_text="The user-defined saved query name." + ) + projects = ListField( + child=serializers.IntegerField(), + required=False, + default=[], + help_text="The saved projects filter for this query.", + ) queryDataset = serializers.ChoiceField( choices=DiscoverSavedQueryTypes.as_text_choices(), default=DiscoverSavedQueryTypes.get_type_name(DiscoverSavedQueryTypes.DISCOVER), + help_text="The dataset you would like to query.", + ) + start = serializers.DateTimeField( + required=False, allow_null=True, help_text="The saved start time for this saved query." + ) + end = serializers.DateTimeField( + required=False, allow_null=True, help_text="The saved end time for this saved query." 
+ ) + range = serializers.CharField( + required=False, + allow_null=True, + help_text="The saved time range period for this saved query.", + ) + fields = ListField( + child=serializers.CharField(), + required=False, + allow_null=True, + help_text="""The fields, functions, or equations that can be requested for the query. At most 20 fields can be selected per request. Each field can be one of the following types: +- A built-in key field. See possible fields in the [properties table](/product/sentry-basics/search/searchable-properties/#properties-table), under any field that is an event property. + - example: `field=transaction` +- A tag. Tags should use the `tag[]` formatting to avoid ambiguity with any fields + - example: `field=tag[isEnterprise]` +- A function which will be in the format of `function_name(parameters,...)`. See possible functions in the [query builder documentation](/product/discover-queries/query-builder/#stacking-functions). + - when a function is included, Discover will group by any tags or fields + - example: `field=count_if(transaction.duration,greater,300)` +- An equation when prefixed with `equation|`. Read more about [equations here](/product/discover-queries/query-builder/query-equations/). + - example: `field=equation|count_if(transaction.duration,greater,300) / count() * 100` +""", + ) # type: ignore[assignment] # XXX: clobbers Serializer.fields + orderby = serializers.CharField( + required=False, + allow_null=True, + help_text="How to order the query results. 
Must be something in the `field` list, excluding equations.", ) - start = serializers.DateTimeField(required=False, allow_null=True) - end = serializers.DateTimeField(required=False, allow_null=True) - range = serializers.CharField(required=False, allow_null=True) - fields = ListField(child=serializers.CharField(), required=False, allow_null=True) # type: ignore[assignment] # XXX: clobbers Serializer.fields - orderby = serializers.CharField(required=False, allow_null=True) # This block of fields is only accepted by discover 1 which omits the version # attribute or has it set to 1 @@ -168,13 +207,46 @@ class DiscoverSavedQuerySerializer(serializers.Serializer): version = serializers.IntegerField(min_value=1, max_value=2, required=False, allow_null=True) # Attributes that are only accepted if version = 2 - environment = ListField(child=serializers.CharField(), required=False, allow_null=True) - query = serializers.CharField(required=False, allow_null=True) + environment = ListField( + child=serializers.CharField(), + required=False, + allow_null=True, + help_text="The name of environments to filter by.", + ) + query = serializers.CharField( + required=False, + allow_null=True, + help_text="Filters results by using [query syntax](/product/sentry-basics/search/).", + ) widths = ListField(child=serializers.CharField(), required=False, allow_null=True) - yAxis = ListField(child=serializers.CharField(), required=False, allow_null=True) - display = serializers.CharField(required=False, allow_null=True) - topEvents = serializers.IntegerField(min_value=1, max_value=10, required=False, allow_null=True) - interval = serializers.CharField(required=False, allow_null=True) + yAxis = ListField( + child=serializers.CharField(), + required=False, + allow_null=True, + help_text="Aggregate functions to be plotted on the chart.", + ) + display = serializers.CharField( + required=False, + allow_null=True, + help_text="""Visualization type for saved query chart. 
Allowed values are: +- default +- previous +- top5 +- daily +- dailytop5 +- bar +""", + ) + topEvents = serializers.IntegerField( + min_value=1, + max_value=10, + required=False, + allow_null=True, + help_text="Number of top events' timeseries to be visualized.", + ) + interval = serializers.CharField( + required=False, allow_null=True, help_text="Resolution of the time series." + ) disallowed_fields = { 1: {"environment", "query", "yAxis", "display", "topEvents", "interval"}, From 1acd0b64b1c1bcff6b0e3806c3048cdc78549d1a Mon Sep 17 00:00:00 2001 From: Alex Zaslavsky Date: Wed, 31 Jul 2024 12:58:14 -0700 Subject: [PATCH 21/52] fix(relocation): Use proper provenance for SAAS -> SAAS (#75355) This ensures that users are merged, rather than having new accounts created. --- src/sentry/tasks/relocation.py | 2 +- tests/sentry/tasks/test_relocation.py | 82 +++++++++++++++++++++++---- 2 files changed, 71 insertions(+), 13 deletions(-) diff --git a/src/sentry/tasks/relocation.py b/src/sentry/tasks/relocation.py index 858b3a3d9a676..8082fb9cc1e3e 100644 --- a/src/sentry/tasks/relocation.py +++ b/src/sentry/tasks/relocation.py @@ -1401,7 +1401,7 @@ def importing(uuid: UUID) -> None: flags=ImportFlags( import_uuid=str(uuid), hide_organizations=True, - merge_users=False, + merge_users=relocation.provenance == Relocation.Provenance.SAAS_TO_SAAS, overwrite_configs=False, ), org_filter=set(relocation.want_org_slugs), diff --git a/tests/sentry/tasks/test_relocation.py b/tests/sentry/tasks/test_relocation.py index 63b9740b98147..3d5fdc4ead615 100644 --- a/tests/sentry/tasks/test_relocation.py +++ b/tests/sentry/tasks/test_relocation.py @@ -22,6 +22,7 @@ unwrap_encrypted_export_tarball, ) from sentry.backup.dependencies import NormalizedModelName, get_model_name +from sentry.backup.exports import export_in_organization_scope from sentry.backup.helpers import ImportFlags, Printer from sentry.backup.imports import import_in_organization_scope from sentry.models.files.file import File 
@@ -172,8 +173,14 @@ def file(self): return file - def swap_file( + def swap_relocation_file_with_data_from_fixture( self, file: File, fixture_name: str, blob_size: int = RELOCATION_BLOB_SIZE + ) -> None: + with open(get_fixture_path("backup", fixture_name), "rb") as fp: + return self.swap_relocation_file(file, BytesIO(fp.read()), blob_size) + + def swap_relocation_file( + self, file: File, contents: BytesIO, blob_size: int = RELOCATION_BLOB_SIZE ) -> None: with TemporaryDirectory() as tmp_dir: tmp_priv_key_path = Path(tmp_dir).joinpath("key") @@ -182,13 +189,13 @@ def swap_file( f.write(self.priv_key_pem) with open(tmp_pub_key_path, "wb") as f: f.write(self.pub_key_pem) - with open(get_fixture_path("backup", fixture_name)) as f: - data = json.load(f) - with open(tmp_pub_key_path, "rb") as p: - self.tarball = create_encrypted_export_tarball( - data, LocalFileEncryptor(p) - ).getvalue() - file.putfile(BytesIO(self.tarball), blob_size=blob_size) + + data = json.load(contents) + with open(tmp_pub_key_path, "rb") as p: + self.tarball = create_encrypted_export_tarball( + data, LocalFileEncryptor(p) + ).getvalue() + file.putfile(BytesIO(self.tarball), blob_size=blob_size) def mock_kms_client(self, fake_kms_client: FakeKeyManagementServiceClient): fake_kms_client.asymmetric_decrypt.call_count = 0 @@ -803,7 +810,7 @@ def test_fail_invalid_json( fake_kms_client: FakeKeyManagementServiceClient, ): file = RelocationFile.objects.get(relocation=self.relocation).file - self.swap_file(file, "invalid-user.json") + self.swap_relocation_file_with_data_from_fixture(file, "invalid-user.json") self.mock_message_builder(fake_message_builder) self.mock_kms_client(fake_kms_client) @@ -829,7 +836,7 @@ def test_fail_no_users( fake_kms_client: FakeKeyManagementServiceClient, ): file = RelocationFile.objects.get(relocation=self.relocation).file - self.swap_file(file, "single-option.json") + self.swap_relocation_file_with_data_from_fixture(file, "single-option.json") 
self.mock_message_builder(fake_message_builder) self.mock_kms_client(fake_kms_client) @@ -880,7 +887,7 @@ def test_fail_no_orgs( fake_kms_client: FakeKeyManagementServiceClient, ): file = RelocationFile.objects.get(relocation=self.relocation).file - self.swap_file(file, "user-with-minimum-privileges.json") + self.swap_relocation_file_with_data_from_fixture(file, "user-with-minimum-privileges.json") self.mock_message_builder(fake_message_builder) self.mock_kms_client(fake_kms_client) @@ -1984,7 +1991,7 @@ def setUp(self): self.relocation.latest_task = OrderedTask.VALIDATING_COMPLETE.name self.relocation.save() - def test_success( + def test_success_self_hosted( self, postprocessing_mock: Mock, fake_kms_client: FakeKeyManagementServiceClient ): self.mock_kms_client(fake_kms_client) @@ -2021,6 +2028,57 @@ def test_success( "sentry.useremail", ] + def test_success_saas_to_saas( + self, postprocessing_mock: Mock, fake_kms_client: FakeKeyManagementServiceClient + ): + org_count = Organization.objects.filter(slug__startswith="testing").count() + with assume_test_silo_mode(SiloMode.CONTROL): + user_count = User.objects.all().count() + + # Export the existing state of the `testing` organization, so that we retain exact ids. + export_contents = BytesIO() + export_in_organization_scope( + export_contents, + org_filter=set(self.relocation.want_org_slugs), + printer=Printer(), + ) + export_contents.seek(0) + + # Convert this into a `SAAS_TO_SAAS` relocation, and use the data we just exported as the + # import blob. + file = RelocationFile.objects.get(relocation=self.relocation).file + self.swap_relocation_file(file, export_contents) + self.mock_kms_client(fake_kms_client) + self.relocation.provenance = Relocation.Provenance.SAAS_TO_SAAS + self.relocation.save() + + # Now, try importing again, which should enable user merging. + importing(self.uuid) + + with assume_test_silo_mode(SiloMode.CONTROL): + # User counts should NOT change, since `merge_users` should be enabled. 
+ assert User.objects.all().count() == user_count + common_user = User.objects.get(username="existing_org_owner@example.com") + + # The existing user should now be in both orgs. + assert OrganizationMember.objects.filter(user_id=common_user.id).count() == 2 + + assert postprocessing_mock.call_count == 1 + assert Organization.objects.filter(slug__startswith="testing").count() == org_count + 1 + assert ( + Organization.objects.filter( + slug__startswith="testing", status=OrganizationStatus.RELOCATION_PENDING_APPROVAL + ).count() + == 1 + ) + + with assume_test_silo_mode(SiloMode.CONTROL): + assert ControlImportChunk.objects.filter(import_uuid=self.uuid).count() == 1 + assert sorted(ControlImportChunk.objects.values_list("model", flat=True)) == [ + "sentry.user", + # We don't overwrite `sentry.useremail`, retaining the existing value instead. + ] + def test_pause( self, postprocessing_mock: Mock, From 35a0660d8387c6139e832626445099ba7618db6a Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 16:07:29 -0400 Subject: [PATCH 22/52] ref: upgrade django-stubs (#75364) this had three sets of breakage addressed by other PRs: - our foreign key subclass was not functioning, django-stubs added a default TypeVar here which started getting filled in with an unbound TypeVar resulting in thousands of errors: fixed by https://github.com/getsentry/sentry/pull/75228 - we were able to remove our fork's [descriptor patch](https://github.com/getsentry/sentry-forked-django-stubs/pull/4) (which removed the non-model overload of `__get__` for fields) as mypy [fixed this issue](https://github.com/python/mypy/pull/17381). 
in doing so it pointed out an unsafe descriptor access through a mixin and so that had to go: https://github.com/getsentry/sentry/pull/75360 - django-stubs improved some field validation through QuerySets which was only checked through managers before: fixed by https://github.com/getsentry/sentry/pull/75359 --- requirements-dev-frozen.txt | 4 ++-- requirements-dev.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 548b32530c707..44c20e91c8d02 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -43,7 +43,7 @@ django==5.0.7 django-crispy-forms==1.14.0 django-csp==3.8 django-pg-zero-downtime-migrations==0.13 -django-stubs-ext==5.0.2 +django-stubs-ext==5.0.4 djangorestframework==3.15.2 docker==6.1.3 drf-spectacular==0.26.3 @@ -182,7 +182,7 @@ selenium==4.16.0 sentry-arroyo==2.16.5 sentry-cli==2.16.0 sentry-devenv==1.7.0 -sentry-forked-django-stubs==5.0.2.post10 +sentry-forked-django-stubs==5.0.4.post1 sentry-forked-djangorestframework-stubs==3.15.0.post1 sentry-kafka-schemas==0.1.102 sentry-ophio==0.2.7 diff --git a/requirements-dev.txt b/requirements-dev.txt index 17e550622ea69..51f99bf9ef86a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -35,7 +35,7 @@ pip-tools>=7.1.0 packaging>=21.3 # for type checking -sentry-forked-django-stubs>=5.0.2.post10 +sentry-forked-django-stubs>=5.0.4.post1 sentry-forked-djangorestframework-stubs>=3.15.0.post1 lxml-stubs msgpack-types>=0.2.0 From ff85a731dc616f8096c65da79d1596d1e906288c Mon Sep 17 00:00:00 2001 From: Michelle Fu <83109586+mifu67@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:11:48 -0700 Subject: [PATCH 23/52] nit(anomaly detection): Update anomaly types to match seer repo (#75357) Fixes https://getsentry.atlassian.net/browse/ALRT-184 --- .../incidents/subscription_processor.py | 5 +++-- src/sentry/seer/anomaly_detection/types.py | 8 ++++++++ .../incidents/test_subscription_processor.py | 19 
+++++++++++++------ 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py index 4968bfa765366..c8dea8ca01816 100644 --- a/src/sentry/incidents/subscription_processor.py +++ b/src/sentry/incidents/subscription_processor.py @@ -47,6 +47,7 @@ from sentry.incidents.utils.types import QuerySubscriptionUpdate from sentry.models.project import Project from sentry.net.http import connection_from_url +from sentry.seer.anomaly_detection.types import AnomalyType from sentry.seer.anomaly_detection.utils import translate_direction from sentry.seer.signed_seer_api import make_signed_seer_api_request from sentry.snuba.dataset import Dataset @@ -630,8 +631,8 @@ def has_anomaly(self, anomaly, label: str) -> bool: """ anomaly_type = anomaly.get("anomaly", {}).get("anomaly_type") - if anomaly_type == "anomaly_high" or ( - label == WARNING_TRIGGER_LABEL and anomaly_type == "anomaly_low" + if anomaly_type == AnomalyType.HIGH_CONFIDENCE.value or ( + label == WARNING_TRIGGER_LABEL and anomaly_type == AnomalyType.LOW_CONFIDENCE.value ): return True return False diff --git a/src/sentry/seer/anomaly_detection/types.py b/src/sentry/seer/anomaly_detection/types.py index 827d1feae4fc8..9da123cc51b84 100644 --- a/src/sentry/seer/anomaly_detection/types.py +++ b/src/sentry/seer/anomaly_detection/types.py @@ -1,3 +1,4 @@ +from enum import Enum from typing import TypedDict @@ -23,3 +24,10 @@ class StoreDataRequest(TypedDict): alert: AlertInSeer config: AnomalyDetectionConfig timeseries: list[TimeSeriesPoint] + + +class AnomalyType(Enum): + HIGH_CONFIDENCE = "anomaly_higher_confidence" + LOW_CONFIDENCE = "anomaly_lower_confidence" + NONE = "none" + NO_DATA = "no_data" diff --git a/tests/sentry/incidents/test_subscription_processor.py b/tests/sentry/incidents/test_subscription_processor.py index 5a8d9d8467d93..e21e1761355fe 100644 --- a/tests/sentry/incidents/test_subscription_processor.py +++ 
b/tests/sentry/incidents/test_subscription_processor.py @@ -50,6 +50,7 @@ update_alert_rule_stats, ) from sentry.incidents.utils.types import AlertRuleActivationConditionType +from sentry.seer.anomaly_detection.types import AnomalyType from sentry.seer.anomaly_detection.utils import translate_direction from sentry.sentry_metrics.configuration import UseCaseKey from sentry.sentry_metrics.indexer.postgres.models import MetricsKeyIndexer @@ -448,7 +449,10 @@ def test_seer_call(self, mock_seer_request: MagicMock): seer_return_value_1 = { "anomalies": [ { - "anomaly": {"anomaly_score": 0.7, "anomaly_type": "anomaly_low"}, + "anomaly": { + "anomaly_score": 0.7, + "anomaly_type": AnomalyType.LOW_CONFIDENCE.value, + }, "timestamp": 1, "value": 5, } @@ -485,7 +489,10 @@ def test_seer_call(self, mock_seer_request: MagicMock): seer_return_value_2 = { "anomalies": [ { - "anomaly": {"anomaly_score": 0.9, "anomaly_type": "anomaly_high"}, + "anomaly": { + "anomaly_score": 0.9, + "anomaly_type": AnomalyType.HIGH_CONFIDENCE.value, + }, "timestamp": 1, "value": 10, } @@ -522,7 +529,7 @@ def test_seer_call(self, mock_seer_request: MagicMock): seer_return_value_3 = { "anomalies": [ { - "anomaly": {"anomaly_score": 0.5, "anomaly_type": "none"}, + "anomaly": {"anomaly_score": 0.5, "anomaly_type": AnomalyType.NONE.value}, "timestamp": 1, "value": 1, } @@ -557,19 +564,19 @@ def test_has_anomaly(self): rule = self.dynamic_rule # test alert ABOVE anomaly1 = { - "anomaly": {"anomaly_score": 0.9, "anomaly_type": "anomaly_high"}, + "anomaly": {"anomaly_score": 0.9, "anomaly_type": AnomalyType.HIGH_CONFIDENCE.value}, "timestamp": 1, "value": 10, } anomaly2 = { - "anomaly": {"anomaly_score": 0.6, "anomaly_type": "anomaly_low"}, + "anomaly": {"anomaly_score": 0.6, "anomaly_type": AnomalyType.LOW_CONFIDENCE.value}, "timestamp": 1, "value": 10, } not_anomaly = { - "anomaly": {"anomaly_score": 0.2, "anomaly_type": "none"}, + "anomaly": {"anomaly_score": 0.2, "anomaly_type": AnomalyType.NONE.value}, 
"timestamp": 1, "value": 10, } From bd524218d65df93cc252913fecbc011d7c36adaa Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 16:21:56 -0400 Subject: [PATCH 24/52] ref: forbid 'Unpacking a string is disallowed' mypy error in ignored files (#75365) slight improvement to typing-ignored `sentry.eventstore.models` --- .github/workflows/backend.yml | 1 + src/sentry/eventstore/models.py | 22 +++++++++++++++------- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 53b7908fb65fd..aa78c995fd296 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -333,6 +333,7 @@ jobs: ! grep 'Incompatible return value type (got "HttpResponseBase"' .artifacts/mypy-all ! grep 'Incompatible types in "yield"' .artifacts/mypy-all ! grep 'Module "sentry.*has no attribute' .artifacts/mypy-all + ! grep 'Unpacking a string is disallowed' .artifacts/mypy-all ! grep 'base class .* defined the type as.*Permission' .artifacts/mypy-all ! grep 'does not explicitly export attribute' .artifacts/mypy-all diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py index ea11dd1b9b4b0..aea722c079655 100644 --- a/src/sentry/eventstore/models.py +++ b/src/sentry/eventstore/models.py @@ -7,7 +7,7 @@ from copy import deepcopy from datetime import datetime, timezone from hashlib import md5 -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import TYPE_CHECKING, Any, Literal, Optional, cast, overload import orjson import sentry_sdk @@ -305,6 +305,14 @@ def project(self, project: Project) -> None: def interfaces(self) -> Mapping[str, Interface]: return get_interfaces(self.data) + @overload + def get_interface(self, name: Literal["user"]) -> User: + ... + + @overload + def get_interface(self, name: str) -> Interface | None: + ... 
+ def get_interface(self, name: str) -> Interface | None: return self.interfaces.get(name) @@ -376,12 +384,12 @@ def get_hashes(self, force_config: StrategyConfiguration | None = None) -> Calcu if hierarchical_hashes: sentry_sdk.set_tag("get_hashes.hierarchical_variant", hierarchical_hashes[0][0]) - flat_hashes = [hash_ for _, hash_ in flat_hashes] - hierarchical_hashes = [hash_ for _, hash_ in hierarchical_hashes] + flat_hashes_values = [hash_ for _, hash_ in flat_hashes] + hierarchical_hashes_values = [hash_ for _, hash_ in hierarchical_hashes] return CalculatedHashes( - hashes=flat_hashes, - hierarchical_hashes=hierarchical_hashes, + hashes=flat_hashes_values, + hierarchical_hashes=hierarchical_hashes_values, tree_labels=tree_labels, variants=variants, ) @@ -389,7 +397,7 @@ def get_hashes(self, force_config: StrategyConfiguration | None = None) -> Calcu @staticmethod def _hashes_from_sorted_grouping_variants( variants: KeyedVariants, - ) -> tuple[list[str], list[Any]]: + ) -> tuple[list[tuple[str, str]], list[Any]]: """Create hashes from variants and filter out duplicates and None values""" from sentry.grouping.variants import ComponentVariant @@ -449,7 +457,7 @@ def get_grouping_variants( if isinstance(force_config, str): # A string like `"mobile:2021-02-12"` stored_config = self.get_grouping_config() - grouping_config = dict(stored_config) + grouping_config = stored_config.copy() grouping_config["id"] = force_config loaded_grouping_config = load_grouping_config(grouping_config) elif isinstance(force_config, StrategyConfiguration): From 25aafd33e961ec96e0f69aecc0f15c71ff6227b1 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:29:09 -0700 Subject: [PATCH 25/52] chore(azure): check if repo config instance differs from integration domain name (#75277) --- src/sentry/integrations/vsts/integration.py | 7 +++--- src/sentry/integrations/vsts/repository.py | 26 ++++++++++++++++++++- 2 files changed, 28 
insertions(+), 5 deletions(-) diff --git a/src/sentry/integrations/vsts/integration.py b/src/sentry/integrations/vsts/integration.py index 941ef7cfddafb..60eb82add4ec8 100644 --- a/src/sentry/integrations/vsts/integration.py +++ b/src/sentry/integrations/vsts/integration.py @@ -133,7 +133,7 @@ def all_repos_migrated(self) -> bool: def get_repositories(self, query: str | None = None) -> Sequence[Mapping[str, str]]: try: - repos = self.get_client(base_url=self.instance).get_repos() + repos = self.get_client().get_repos() except (ApiError, IdentityNotValid) as e: raise IntegrationError(self.message_from_error(e)) data = [] @@ -154,7 +154,7 @@ def get_unmigratable_repositories(self) -> Collection[RpcRepository]: return [repo for repo in repos if repo.external_id not in identifiers_to_exclude] def has_repo_access(self, repo: RpcRepository) -> bool: - client = self.get_client(base_url=self.instance) + client = self.get_client() try: # since we don't actually use webhooks for vsts commits, # just verify repo access @@ -193,8 +193,7 @@ def check_domain_name(self, default_identity: RpcIdentity) -> None: self.model.save() def get_organization_config(self) -> Sequence[Mapping[str, Any]]: - instance = self.model.metadata["domain_name"] - client = self.get_client(base_url=instance) + client = self.get_client() project_selector = [] all_states_set = set() diff --git a/src/sentry/integrations/vsts/repository.py b/src/sentry/integrations/vsts/repository.py index be267defa2e77..ba30693bb7edc 100644 --- a/src/sentry/integrations/vsts/repository.py +++ b/src/sentry/integrations/vsts/repository.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging from collections.abc import Mapping, MutableMapping, Sequence from typing import Any @@ -11,6 +12,8 @@ MAX_COMMIT_DATA_REQUESTS = 90 +logger = logging.getLogger(__name__) + class VstsRepositoryProvider(IntegrationRepositoryProvider): name = "Azure DevOps" @@ -73,7 +76,18 @@ def zip_commit_data( self, repo: Repository, 
commit_list: Sequence[Commit], organization_id: int ) -> Sequence[Commit]: installation = self.get_installation(repo.integration_id, organization_id) - client = installation.get_client(base_url=repo.config["instance"]) + instance = repo.config["instance"] + if installation.instance != instance: + logger.info( + "integrations.vsts.mismatched_instance", + extra={ + "repo_instance": instance, + "installation_instance": installation.instance, + "org_integration_id": repo.integration_id, + "repo_id": repo.id, + }, + ) + client = installation.get_client(base_url=instance) n = 0 for commit in commit_list: # Azure will truncate commit comments to only the first line. @@ -100,6 +114,16 @@ def compare_commits( """TODO(mgaeta): This function is kinda a mess.""" installation = self.get_installation(repo.integration_id, repo.organization_id) instance = repo.config["instance"] + if installation.instance != instance: + logger.info( + "integrations.vsts.mismatched_instance", + extra={ + "repo_instance": instance, + "installation_instance": installation.instance, + "org_integration_id": repo.integration_id, + "repo_id": repo.id, + }, + ) client = installation.get_client(base_url=instance) try: From dce017f8b5e07a68d0a04482d2c2e995f950e828 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 31 Jul 2024 20:35:51 +0000 Subject: [PATCH 26/52] Revert "chore(api): Unpublish ProjectUserReportsEndpoint (#75079)" This reverts commit be176fc1bca1ceacd1f91ba0c57c4b1cf19c7aaa. 
Co-authored-by: aliu39 <159852527+aliu39@users.noreply.github.com> --- api-docs/openapi.json | 3 + api-docs/paths/projects/user-feedback.json | 175 ++++++++++++++++++ .../api/endpoints/project_user_reports.py | 4 +- .../endpoints/projects/test_user_feedback.py | 37 ++++ 4 files changed, 217 insertions(+), 2 deletions(-) create mode 100644 api-docs/paths/projects/user-feedback.json create mode 100644 tests/apidocs/endpoints/projects/test_user_feedback.py diff --git a/api-docs/openapi.json b/api-docs/openapi.json index 0d80ed7180580..bb50e3c6837bb 100644 --- a/api-docs/openapi.json +++ b/api-docs/openapi.json @@ -117,6 +117,9 @@ "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/stats/": { "$ref": "paths/projects/stats.json" }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/user-feedback/": { + "$ref": "paths/projects/user-feedback.json" + }, "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/": { "$ref": "paths/projects/service-hooks.json" }, diff --git a/api-docs/paths/projects/user-feedback.json b/api-docs/paths/projects/user-feedback.json new file mode 100644 index 0000000000000..2913a4590520e --- /dev/null +++ b/api-docs/paths/projects/user-feedback.json @@ -0,0 +1,175 @@ +{ + "get": { + "tags": ["Projects"], + "description": "Return a list of user feedback items within this project.", + "operationId": "List a Project's User Feedback", + "parameters": [ + { + "name": "organization_id_or_slug", + "in": "path", + "description": "The id or slug of the organization.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "project_id_or_slug", + "in": "path", + "description": "The id or slug of the project.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Success", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "../../components/schemas/user-feedback.json#/UserFeedback" + } + }, + 
"example": [ + { + "comments": "It broke!", + "dateCreated": "2018-11-06T21:20:11.468Z", + "email": "jane@example.com", + "event": { + "eventID": "14bad9a2e3774046977a21440ddb39b2", + "id": null + }, + "eventID": "14bad9a2e3774046977a21440ddb39b2", + "id": "1", + "issue": null, + "name": "Jane Smith", + "user": null + } + ] + } + } + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "Not Found" + } + }, + "security": [ + { + "auth_token": ["project:read"] + } + ] + }, + "post": { + "tags": ["Projects"], + "description": "Submit and associate user feedback with an issue.\n\nFeedback must be received by the server no more than 30 minutes after the event was saved.\n\nAdditionally, within 5 minutes of submitting feedback it may also be overwritten. This is useful in situations where you may need to retry sending a request due to network failures.\n\nIf feedback is rejected due to a mutability threshold, a 409 status code will be returned.\n\nNote: Feedback may be submitted with DSN authentication (see auth documentation).", + "operationId": "Submit User Feedback", + "parameters": [ + { + "name": "organization_id_or_slug", + "in": "path", + "description": "The id or slug of the organization.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "project_id_or_slug", + "in": "path", + "description": "The id or slug of the project.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "required": ["event_id", "name", "email", "comments"], + "type": "object", + "properties": { + "event_id": { + "type": "string", + "description": "The event ID. This can be retrieved from the [beforeSend callback](https://docs.sentry.io/platforms/javascript/configuration/filtering/#using-beforesend)." + }, + "name": { + "type": "string", + "description": "User's name." + }, + "email": { + "type": "string", + "description": "User's email address." 
+ }, + "comments": { + "type": "string", + "description": "Comments supplied by user." + } + } + }, + "example": { + "event_id": "14bad9a2e3774046977a21440ddb39b2", + "name": "Jane Schmidt", + "email": "jane@empowerplant.io", + "comments": "It broke!" + } + } + }, + "required": false + }, + "responses": { + "200": { + "description": "Success", + "content": { + "application/json": { + "schema": { + "$ref": "../../components/schemas/user-feedback.json#/UserFeedback" + }, + "example": { + "comments": "It broke!", + "dateCreated": "2018-11-06T21:20:11.468Z", + "email": "jane@example.com", + "event": { + "eventID": "14bad9a2e3774046977a21440ddb39b2", + "id": null + }, + "eventID": "14bad9a2e3774046977a21440ddb39b2", + "id": "1", + "issue": null, + "name": "Jane Smith", + "user": null + } + } + } + }, + "400": { + "description": "Bad Input" + }, + "403": { + "description": "Forbidden" + }, + "404": { + "description": "The requested resource does not exist" + }, + "409": { + "description": "Conflict" + } + }, + "security": [ + { + "auth_token": ["project:write"] + }, + { + "dsn": [] + } + ] + } +} diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py index 7175b371f7efa..36dda69e763ff 100644 --- a/src/sentry/api/endpoints/project_user_reports.py +++ b/src/sentry/api/endpoints/project_user_reports.py @@ -33,8 +33,8 @@ class _PaginateKwargs(TypedDict): class ProjectUserReportsEndpoint(ProjectEndpoint, EnvironmentMixin): owner = ApiOwner.FEEDBACK publish_status = { - "GET": ApiPublishStatus.PRIVATE, - "POST": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.PRIVATE, # TODO: deprecate + "POST": ApiPublishStatus.PRIVATE, # TODO: deprecate } authentication_classes = ProjectEndpoint.authentication_classes + (DSNAuthentication,) diff --git a/tests/apidocs/endpoints/projects/test_user_feedback.py b/tests/apidocs/endpoints/projects/test_user_feedback.py new file mode 100644 index 0000000000000..07aceff8b095e --- 
/dev/null +++ b/tests/apidocs/endpoints/projects/test_user_feedback.py @@ -0,0 +1,37 @@ +from django.test.client import RequestFactory +from django.utils import timezone + +from fixtures.apidocs_test_case import APIDocsTestCase + + +class ProjectUserFeedbackDocs(APIDocsTestCase): + def setUp(self): + event = self.create_event("a", message="oh no") + self.event_id = event.event_id + self.create_userreport( + date_added=timezone.now(), + project=self.project, + event_id=self.event_id, + ) + + self.url = f"/api/0/projects/{self.organization.slug}/{self.project.slug}/user-feedback/" + + self.login_as(user=self.user) + + def test_get(self): + response = self.client.get(self.url) + request = RequestFactory().get(self.url) + + self.validate_schema(request, response) + + def test_post(self): + data = { + "event_id": self.event_id, + "name": "Hellboy", + "email": "hellboy@sentry.io", + "comments": "It broke!", + } + response = self.client.post(self.url, data) + request = RequestFactory().post(self.url, data) + + self.validate_schema(request, response) From fa353006cc155c711ebcc512b97749c4fdf25f8a Mon Sep 17 00:00:00 2001 From: Christinarlong <60594860+Christinarlong@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:46:32 -0700 Subject: [PATCH 27/52] ref(control_silo): Consolidate integrations web resources (#75171) Moves all integrations web resources and related tests to sentry/integrations/web and tests/sentry/integrations/web. Additionally updates the moved files' typing. 
ref(#73859) --- pyproject.toml | 3 --- src/sentry/integrations/discord/urls.py | 4 +++- .../jira/views/extension_configuration.py | 6 +++--- src/sentry/integrations/msteams/urls.py | 4 +++- src/sentry/integrations/vercel/urls.py | 2 +- src/sentry/integrations/vsts/urls.py | 2 +- .../web}/debug/debug_notify_disable.py | 3 +-- .../web}/discord_extension_configuration.py | 0 .../web}/doc_integration_avatar.py | 0 .../web}/integration_extension_configuration.py | 17 +++++++++++++++-- .../web}/msteams_extension_configuration.py | 5 +++-- .../web}/organization_integration_setup.py | 9 ++++++--- .../web}/vercel_extension_configuration.py | 0 .../web}/vsts_extension_configuration.py | 0 .../middleware/integrations/parsers/discord.py | 4 +++- src/sentry/pipeline/base.py | 2 +- src/sentry/web/debug_urls.py | 2 +- src/sentry/web/urls.py | 4 ++-- tests/sentry/api/test_path_params.py | 2 +- .../web}/test_msteams_extension.py | 6 ++++-- .../web}/test_organization_integration_setup.py | 0 21 files changed, 48 insertions(+), 27 deletions(-) rename src/sentry/{web/frontend => integrations/web}/debug/debug_notify_disable.py (96%) rename src/sentry/{web/frontend => integrations/web}/discord_extension_configuration.py (100%) rename src/sentry/{web/frontend => integrations/web}/doc_integration_avatar.py (100%) rename src/sentry/{web/frontend => integrations/web}/integration_extension_configuration.py (90%) rename src/sentry/{web/frontend => integrations/web}/msteams_extension_configuration.py (87%) rename src/sentry/{web/frontend => integrations/web}/organization_integration_setup.py (83%) rename src/sentry/{web/frontend => integrations/web}/vercel_extension_configuration.py (100%) rename src/sentry/{web/frontend => integrations/web}/vsts_extension_configuration.py (100%) rename tests/sentry/{web/frontend => integrations/web}/test_msteams_extension.py (90%) rename tests/sentry/{web/frontend => integrations/web}/test_organization_integration_setup.py (100%) diff --git a/pyproject.toml 
b/pyproject.toml index 7ad5a5a9a2250..f409a750bad0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -434,12 +434,10 @@ module = [ "sentry.web.frontend.disabled_member_view", "sentry.web.frontend.group_plugin_action", "sentry.web.frontend.idp_email_verification", - "sentry.web.frontend.integration_extension_configuration", "sentry.web.frontend.js_sdk_loader", "sentry.web.frontend.newest_issue", "sentry.web.frontend.oauth_authorize", "sentry.web.frontend.oauth_token", - "sentry.web.frontend.organization_integration_setup", "sentry.web.frontend.pipeline_advancer", "sentry.web.frontend.project_event", "sentry.web.frontend.react_page", @@ -465,7 +463,6 @@ module = [ "tests.sentry.digests.test_notifications", "tests.sentry.eventstore.test_base", "tests.sentry.grouping.test_result", - "tests.sentry.identity.test_oauth2", "tests.sentry.incidents.test_logic", "tests.sentry.ingest.test_slicing", "tests.sentry.issues.test_utils", diff --git a/src/sentry/integrations/discord/urls.py b/src/sentry/integrations/discord/urls.py index 30ed4e49d0bc2..c93ebc084cfdd 100644 --- a/src/sentry/integrations/discord/urls.py +++ b/src/sentry/integrations/discord/urls.py @@ -1,6 +1,8 @@ from django.urls import re_path -from sentry.web.frontend.discord_extension_configuration import DiscordExtensionConfigurationView +from sentry.integrations.web.discord_extension_configuration import ( + DiscordExtensionConfigurationView, +) from .views.link_identity import DiscordLinkIdentityView from .views.unlink_identity import DiscordUnlinkIdentityView diff --git a/src/sentry/integrations/jira/views/extension_configuration.py b/src/sentry/integrations/jira/views/extension_configuration.py index 700518c109d11..ed3465ae3fbf7 100644 --- a/src/sentry/integrations/jira/views/extension_configuration.py +++ b/src/sentry/integrations/jira/views/extension_configuration.py @@ -1,10 +1,10 @@ import orjson -from sentry.utils.signing import unsign -from sentry.web.frontend.base import control_silo_view -from 
sentry.web.frontend.integration_extension_configuration import ( +from sentry.integrations.web.integration_extension_configuration import ( IntegrationExtensionConfigurationView, ) +from sentry.utils.signing import unsign +from sentry.web.frontend.base import control_silo_view from . import SALT diff --git a/src/sentry/integrations/msteams/urls.py b/src/sentry/integrations/msteams/urls.py index 3d9667dfab328..21cc973f368ed 100644 --- a/src/sentry/integrations/msteams/urls.py +++ b/src/sentry/integrations/msteams/urls.py @@ -1,6 +1,8 @@ from django.urls import re_path -from sentry.web.frontend.msteams_extension_configuration import MsTeamsExtensionConfigurationView +from sentry.integrations.web.msteams_extension_configuration import ( + MsTeamsExtensionConfigurationView, +) from .link_identity import MsTeamsLinkIdentityView from .unlink_identity import MsTeamsUnlinkIdentityView diff --git a/src/sentry/integrations/vercel/urls.py b/src/sentry/integrations/vercel/urls.py index 4e74c8aa2dfac..d15cd0d95342b 100644 --- a/src/sentry/integrations/vercel/urls.py +++ b/src/sentry/integrations/vercel/urls.py @@ -1,6 +1,6 @@ from django.urls import re_path -from sentry.web.frontend.vercel_extension_configuration import VercelExtensionConfigurationView +from sentry.integrations.web.vercel_extension_configuration import VercelExtensionConfigurationView from .webhook import VercelWebhookEndpoint diff --git a/src/sentry/integrations/vsts/urls.py b/src/sentry/integrations/vsts/urls.py index 44f6c38557926..09a95dc985e17 100644 --- a/src/sentry/integrations/vsts/urls.py +++ b/src/sentry/integrations/vsts/urls.py @@ -1,6 +1,6 @@ from django.urls import re_path -from sentry.web.frontend.vsts_extension_configuration import VstsExtensionConfigurationView +from sentry.integrations.web.vsts_extension_configuration import VstsExtensionConfigurationView from .search import VstsSearchEndpoint from .webhooks import WorkItemWebhook diff --git 
a/src/sentry/web/frontend/debug/debug_notify_disable.py b/src/sentry/integrations/web/debug/debug_notify_disable.py similarity index 96% rename from src/sentry/web/frontend/debug/debug_notify_disable.py rename to src/sentry/integrations/web/debug/debug_notify_disable.py index d3fb7aa9f6ef4..a0e058c380890 100644 --- a/src/sentry/web/frontend/debug/debug_notify_disable.py +++ b/src/sentry/integrations/web/debug/debug_notify_disable.py @@ -4,8 +4,7 @@ from sentry.integrations.models.integration import Integration from sentry.integrations.notify_disable import get_provider_type, get_url from sentry.models.organization import Organization - -from .mail import MailPreview +from sentry.web.frontend.debug.mail import MailPreview class DebugNotifyDisableView(View): diff --git a/src/sentry/web/frontend/discord_extension_configuration.py b/src/sentry/integrations/web/discord_extension_configuration.py similarity index 100% rename from src/sentry/web/frontend/discord_extension_configuration.py rename to src/sentry/integrations/web/discord_extension_configuration.py diff --git a/src/sentry/web/frontend/doc_integration_avatar.py b/src/sentry/integrations/web/doc_integration_avatar.py similarity index 100% rename from src/sentry/web/frontend/doc_integration_avatar.py rename to src/sentry/integrations/web/doc_integration_avatar.py diff --git a/src/sentry/web/frontend/integration_extension_configuration.py b/src/sentry/integrations/web/integration_extension_configuration.py similarity index 90% rename from src/sentry/web/frontend/integration_extension_configuration.py rename to src/sentry/integrations/web/integration_extension_configuration.py index d4aaba9bb0010..9d5ccd312266a 100644 --- a/src/sentry/web/frontend/integration_extension_configuration.py +++ b/src/sentry/integrations/web/integration_extension_configuration.py @@ -8,9 +8,12 @@ from django.utils.http import urlencode from sentry import features +from sentry.hybridcloud.services.organization_mapping.model import 
RpcOrganizationMapping +from sentry.integrations.base import IntegrationProvider from sentry.integrations.manager import default_manager as integrations from sentry.integrations.pipeline import IntegrationPipeline from sentry.organizations.services.organization import organization_service +from sentry.organizations.services.organization.model import RpcOrganization from sentry.users.services.user.service import user_service from sentry.web.frontend.base import BaseView @@ -19,8 +22,14 @@ class ExternalIntegrationPipeline(IntegrationPipeline): def _dialog_success(self, _org_integration): + assert self.organization, "Organization must exist to get slug" org_slug = self.organization.slug + + assert isinstance( + self.provider, IntegrationProvider + ), "Must be an IntegrationProvider to get integration key" provider = self.provider.integration_key + integration_id = self.integration.id # add in param string if we have a next page param_string = "" @@ -36,6 +45,8 @@ def _dialog_success(self, _org_integration): class IntegrationExtensionConfigurationView(BaseView): auth_required = False + external_provider_key: str + provider: str def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponseBase: if not request.user.is_authenticated: @@ -51,7 +62,7 @@ def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponseBase: return self.redirect(redirect_uri) # check if we have one org - organization = None + organization: RpcOrganization | RpcOrganizationMapping | None = None organizations = user_service.get_organizations(user_id=request.user.id) if len(organizations) == 1: organization = organizations[0] @@ -110,7 +121,9 @@ def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponseBase: def init_pipeline(self, request: HttpRequest, organization, params): pipeline = ExternalIntegrationPipeline( - request=request, organization=organization, provider_key=self.external_provider_key + request=request, + organization=organization, + 
provider_key=self.external_provider_key, ) pipeline.initialize() diff --git a/src/sentry/web/frontend/msteams_extension_configuration.py b/src/sentry/integrations/web/msteams_extension_configuration.py similarity index 87% rename from src/sentry/web/frontend/msteams_extension_configuration.py rename to src/sentry/integrations/web/msteams_extension_configuration.py index 2aef2f51c72da..b90a29f8548d5 100644 --- a/src/sentry/web/frontend/msteams_extension_configuration.py +++ b/src/sentry/integrations/web/msteams_extension_configuration.py @@ -1,9 +1,10 @@ from sentry.integrations.msteams.constants import SALT +from sentry.integrations.web.integration_extension_configuration import ( + IntegrationExtensionConfigurationView, +) from sentry.utils.signing import unsign from sentry.web.frontend.base import control_silo_view -from .integration_extension_configuration import IntegrationExtensionConfigurationView - # 24 hours to finish installation INSTALL_EXPIRATION_TIME = 60 * 60 * 24 diff --git a/src/sentry/web/frontend/organization_integration_setup.py b/src/sentry/integrations/web/organization_integration_setup.py similarity index 83% rename from src/sentry/web/frontend/organization_integration_setup.py rename to src/sentry/integrations/web/organization_integration_setup.py index 61a5c867e8467..e346c15e055c8 100644 --- a/src/sentry/web/frontend/organization_integration_setup.py +++ b/src/sentry/integrations/web/organization_integration_setup.py @@ -1,13 +1,13 @@ import logging import sentry_sdk -from django.http import Http404 +from django.http import Http404, HttpRequest from django.http.response import HttpResponseBase -from rest_framework.request import Request from sentry_sdk.tracing import TRANSACTION_SOURCE_VIEW from sentry import features from sentry.features.exceptions import FeatureNotRegistered +from sentry.integrations.base import IntegrationProvider from sentry.integrations.pipeline import IntegrationPipeline from sentry.web.frontend.base import 
ControlSiloOrganizationView, control_silo_view @@ -20,7 +20,7 @@ class OrganizationIntegrationSetupView(ControlSiloOrganizationView): csrf_protect = False - def handle(self, request: Request, organization, provider_id) -> HttpResponseBase: + def handle(self, request: HttpRequest, organization, provider_id) -> HttpResponseBase: scope = sentry_sdk.Scope.get_current_scope() scope.set_transaction_name(f"integration.{provider_id}", source=TRANSACTION_SOURCE_VIEW) @@ -29,6 +29,9 @@ def handle(self, request: Request, organization, provider_id) -> HttpResponseBas ) is_feature_enabled = {} + assert isinstance( + pipeline.provider, IntegrationProvider + ), "Pipeline must be an integration provider to get features" for feature in pipeline.provider.features: feature_flag_name = "organizations:integrations-%s" % feature.value try: diff --git a/src/sentry/web/frontend/vercel_extension_configuration.py b/src/sentry/integrations/web/vercel_extension_configuration.py similarity index 100% rename from src/sentry/web/frontend/vercel_extension_configuration.py rename to src/sentry/integrations/web/vercel_extension_configuration.py diff --git a/src/sentry/web/frontend/vsts_extension_configuration.py b/src/sentry/integrations/web/vsts_extension_configuration.py similarity index 100% rename from src/sentry/web/frontend/vsts_extension_configuration.py rename to src/sentry/integrations/web/vsts_extension_configuration.py diff --git a/src/sentry/middleware/integrations/parsers/discord.py b/src/sentry/middleware/integrations/parsers/discord.py index 61e454bcdaa34..0fc8c1eba9281 100644 --- a/src/sentry/middleware/integrations/parsers/discord.py +++ b/src/sentry/middleware/integrations/parsers/discord.py @@ -18,11 +18,13 @@ ) from sentry.integrations.models.organization_integration import OrganizationIntegration from sentry.integrations.types import EXTERNAL_PROVIDERS, ExternalProviders +from sentry.integrations.web.discord_extension_configuration import ( + DiscordExtensionConfigurationView, 
+) from sentry.middleware.integrations.tasks import convert_to_async_discord_response from sentry.models.integrations import Integration from sentry.models.outbox import WebhookProviderIdentifier from sentry.types.region import Region -from sentry.web.frontend.discord_extension_configuration import DiscordExtensionConfigurationView logger = logging.getLogger(__name__) diff --git a/src/sentry/pipeline/base.py b/src/sentry/pipeline/base.py index 70bf4dbcd3174..8bcfbc2694473 100644 --- a/src/sentry/pipeline/base.py +++ b/src/sentry/pipeline/base.py @@ -101,7 +101,7 @@ def get_provider(self, provider_key: str) -> PipelineProvider: def __init__( self, - request: Request, + request: Request | HttpRequest, provider_key: str, organization: Organization | RpcOrganization | None = None, provider_model: Model | None = None, diff --git a/src/sentry/web/debug_urls.py b/src/sentry/web/debug_urls.py index 66baffc975791..5fb135a723986 100644 --- a/src/sentry/web/debug_urls.py +++ b/src/sentry/web/debug_urls.py @@ -2,6 +2,7 @@ from django.views.generic import TemplateView import sentry.web.frontend.debug.mail +from sentry.integrations.web.debug.debug_notify_disable import DebugNotifyDisableView from sentry.web.frontend.debug import debug_auth_views from sentry.web.frontend.debug.debug_assigned_email import ( DebugAssignedEmailView, @@ -31,7 +32,6 @@ from sentry.web.frontend.debug.debug_new_release_email import DebugNewReleaseEmailView from sentry.web.frontend.debug.debug_new_user_feedback_email import DebugNewUserFeedbackEmailView from sentry.web.frontend.debug.debug_note_email import DebugNoteEmailView -from sentry.web.frontend.debug.debug_notify_disable import DebugNotifyDisableView from sentry.web.frontend.debug.debug_oauth_authorize import ( DebugOAuthAuthorizeErrorView, DebugOAuthAuthorizeView, diff --git a/src/sentry/web/urls.py b/src/sentry/web/urls.py index c87764044941e..c83a2b7f61efb 100644 --- a/src/sentry/web/urls.py +++ b/src/sentry/web/urls.py @@ -11,6 +11,8 @@ from 
sentry.api.endpoints.oauth_userinfo import OAuthUserInfoEndpoint from sentry.auth.providers.saml2.provider import SAML2AcceptACSView, SAML2MetadataView, SAML2SLSView from sentry.charts.endpoints import serve_chartcuterie_config +from sentry.integrations.web.doc_integration_avatar import DocIntegrationAvatarPhotoView +from sentry.integrations.web.organization_integration_setup import OrganizationIntegrationSetupView from sentry.web import api from sentry.web.frontend import accounts, generic from sentry.web.frontend.account_identity import AccountIdentityAssociateView @@ -21,7 +23,6 @@ from sentry.web.frontend.auth_organization_login import AuthOrganizationLoginView from sentry.web.frontend.auth_provider_login import AuthProviderLoginView from sentry.web.frontend.disabled_member_view import DisabledMemberView -from sentry.web.frontend.doc_integration_avatar import DocIntegrationAvatarPhotoView from sentry.web.frontend.error_page_embed import ErrorPageEmbedView from sentry.web.frontend.group_event_json import GroupEventJsonView from sentry.web.frontend.group_plugin_action import GroupPluginActionView @@ -35,7 +36,6 @@ from sentry.web.frontend.oauth_token import OAuthTokenView from sentry.web.frontend.organization_auth_settings import OrganizationAuthSettingsView from sentry.web.frontend.organization_avatar import OrganizationAvatarPhotoView -from sentry.web.frontend.organization_integration_setup import OrganizationIntegrationSetupView from sentry.web.frontend.out import OutView from sentry.web.frontend.pipeline_advancer import PipelineAdvancerView from sentry.web.frontend.project_event import ProjectEventRedirect diff --git a/tests/sentry/api/test_path_params.py b/tests/sentry/api/test_path_params.py index 0bb4f0ba654c6..772fea81767e1 100644 --- a/tests/sentry/api/test_path_params.py +++ b/tests/sentry/api/test_path_params.py @@ -33,7 +33,7 @@ def extract_all_url_patterns( @no_silo_test class TestPathParams(TestCase): - IGNORE_CLASS_PREFIXES = ("sentry.web", 
"sentry.auth") + IGNORE_CLASS_PREFIXES = ("sentry.web", "sentry.integrations.web", "sentry.auth") def test_if_sentry_endpoints_have_id_or_slug_path_params(self): """ diff --git a/tests/sentry/web/frontend/test_msteams_extension.py b/tests/sentry/integrations/web/test_msteams_extension.py similarity index 90% rename from tests/sentry/web/frontend/test_msteams_extension.py rename to tests/sentry/integrations/web/test_msteams_extension.py index 72816fd40ea7c..9cea6c70d927c 100644 --- a/tests/sentry/web/frontend/test_msteams_extension.py +++ b/tests/sentry/integrations/web/test_msteams_extension.py @@ -2,12 +2,14 @@ from django.core.signing import SignatureExpired +from sentry.integrations.web.msteams_extension_configuration import ( + MsTeamsExtensionConfigurationView, +) from sentry.models.organizationmember import OrganizationMember from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase from sentry.testutils.silo import assume_test_silo_mode, control_silo_test from sentry.utils.signing import sign -from sentry.web.frontend.msteams_extension_configuration import MsTeamsExtensionConfigurationView @control_silo_test @@ -27,7 +29,7 @@ def test_map_params(self): params = {"signed_params": signed_data} assert data == config_view.map_params_to_state(params) - @patch("sentry.web.frontend.msteams_extension_configuration.unsign") + @patch("sentry.integrations.web.msteams_extension_configuration.unsign") def test_expired_signature(self, mock_unsign): with self.feature({"organizations:integrations-alert-rule": True}): mock_unsign.side_effect = SignatureExpired() diff --git a/tests/sentry/web/frontend/test_organization_integration_setup.py b/tests/sentry/integrations/web/test_organization_integration_setup.py similarity index 100% rename from tests/sentry/web/frontend/test_organization_integration_setup.py rename to tests/sentry/integrations/web/test_organization_integration_setup.py From 0454d5f322552c38e6dfb614b973b84b08459b34 Mon Sep 17 00:00:00 
2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:58:27 -0700 Subject: [PATCH 28/52] fix(toolbar): fix flag and release analytic names (#75374) - renaming the releases analytic to be more specific - i also realized that i deleted the feature flag tracking analytic in https://github.com/getsentry/sentry/pull/75194, but looks like https://github.com/getsentry/sentry/pull/75305 adds them back! --- .../components/releases/releasesPanel.tsx | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/static/app/components/devtoolbar/components/releases/releasesPanel.tsx b/static/app/components/devtoolbar/components/releases/releasesPanel.tsx index 94a28aa6f18d1..83e5966e6dc8f 100644 --- a/static/app/components/devtoolbar/components/releases/releasesPanel.tsx +++ b/static/app/components/devtoolbar/components/releases/releasesPanel.tsx @@ -79,17 +79,19 @@ function ReleaseSummary({orgSlug, release}: {orgSlug: string; release: Release}) css={{width: '100%', alignItems: 'flex-start', padding: 'var(--space150)'}} > - - {formatVersion(release.version)} - - {release.commitCount > 0 && ( - - )} + + + {formatVersion(release.version)} + + {release.commitCount > 0 && ( + + )} + Date: Wed, 31 Jul 2024 17:04:27 -0400 Subject: [PATCH 29/52] feat(insights): reload projects when missing data (#75358) Refetch the projects when the user arrives on an Insights Module or the Performance page with no data. Users can now directly navigate to the Performance or Insights tabs and see their data during onboarding, without needing an additional refresh to synchronize the projects hasData flags. 
--- static/app/components/createAlertButton.spec.tsx | 5 +++++ .../components/events/eventReplay/index.spec.tsx | 1 + .../interfaces/breadcrumbs/breadcrumbs.spec.tsx | 1 + .../components/replays/header/errorCounts.spec.tsx | 1 + .../app/utils/replays/hooks/useReplayData.spec.tsx | 1 + static/app/utils/useProjects.tsx | 8 +++++++- .../resources/views/resourcesLandingPage.spec.tsx | 1 + .../tables/pagePerformanceTable.spec.tsx | 1 + .../webVitals/views/webVitalsLandingPage.spec.tsx | 1 + .../insights/cache/views/cacheLandingPage.spec.tsx | 2 ++ .../common/components/modulesOnboarding.spec.tsx | 3 +++ .../common/components/modulesOnboarding.tsx | 14 +++++++++++++- .../database/views/databaseLandingPage.spec.tsx | 1 + .../insights/http/views/httpLandingPage.spec.tsx | 1 + .../queries/useCrossPlatformProject.spec.tsx | 1 + .../screenload/views/screenLoadSpansPage.spec.tsx | 1 + .../views/screenloadLandingPage.spec.tsx | 1 + .../queues/views/destinationSummaryPage.spec.tsx | 1 + .../queues/views/queuesLandingPage.spec.tsx | 1 + .../views/monitors/components/monitorForm.spec.tsx | 1 + static/app/views/performance/content.tsx | 12 +++++++++++- .../transactionSpans/spanSummary/content.spec.tsx | 1 + 22 files changed, 57 insertions(+), 3 deletions(-) diff --git a/static/app/components/createAlertButton.spec.tsx b/static/app/components/createAlertButton.spec.tsx index 24a69be02e55a..02ba61ee94f0c 100644 --- a/static/app/components/createAlertButton.spec.tsx +++ b/static/app/components/createAlertButton.spec.tsx @@ -25,6 +25,7 @@ describe('CreateAlertFromViewButton', () => { jest.mocked(useProjects).mockReturnValue({ projects: [], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, @@ -72,6 +73,7 @@ describe('CreateAlertFromViewButton', () => { jest.mocked(useProjects).mockReturnValue({ projects, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, @@ -107,6 +109,7 @@ 
describe('CreateAlertFromViewButton', () => { jest.mocked(useProjects).mockReturnValue({ projects, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, @@ -155,6 +158,7 @@ describe('CreateAlertFromViewButton', () => { jest.mocked(useProjects).mockReturnValue({ projects, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, @@ -257,6 +261,7 @@ describe('CreateAlertFromViewButton', () => { jest.mocked(useProjects).mockReturnValue({ projects, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/components/events/eventReplay/index.spec.tsx b/static/app/components/events/eventReplay/index.spec.tsx index af75b7c7116b3..e78c310e58dae 100644 --- a/static/app/components/events/eventReplay/index.spec.tsx +++ b/static/app/components/events/eventReplay/index.spec.tsx @@ -135,6 +135,7 @@ describe('EventReplay', function () { hasMore: false, initiallyLoaded: false, onSearch: () => Promise.resolve(), + reloadProjects: jest.fn(), placeholders: [], projects: [project], }); diff --git a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx index d41ee11eec7e9..2a254deb77ccd 100644 --- a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx +++ b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx @@ -36,6 +36,7 @@ describe('Breadcrumbs', () => { hasMore: false, initiallyLoaded: false, onSearch: () => Promise.resolve(), + reloadProjects: jest.fn(), placeholders: [], projects: [project], }); diff --git a/static/app/components/replays/header/errorCounts.spec.tsx b/static/app/components/replays/header/errorCounts.spec.tsx index d79677c131764..afe8f7060dcb5 100644 --- a/static/app/components/replays/header/errorCounts.spec.tsx +++ b/static/app/components/replays/header/errorCounts.spec.tsx @@ 
-40,6 +40,7 @@ describe('ErrorCounts', () => { hasMore: false, initiallyLoaded: true, onSearch: () => Promise.resolve(), + reloadProjects: jest.fn(), placeholders: [], }); }); diff --git a/static/app/utils/replays/hooks/useReplayData.spec.tsx b/static/app/utils/replays/hooks/useReplayData.spec.tsx index baa9fa88b3639..2c1d204db7326 100644 --- a/static/app/utils/replays/hooks/useReplayData.spec.tsx +++ b/static/app/utils/replays/hooks/useReplayData.spec.tsx @@ -29,6 +29,7 @@ jest.mocked(useProjects).mockReturnValue({ hasMore: false, initiallyLoaded: true, onSearch: () => Promise.resolve(), + reloadProjects: jest.fn(), placeholders: [], }); diff --git a/static/app/utils/useProjects.tsx b/static/app/utils/useProjects.tsx index cdcfa7be2dabf..9bdb30d2706a8 100644 --- a/static/app/utils/useProjects.tsx +++ b/static/app/utils/useProjects.tsx @@ -58,6 +58,10 @@ type Result = { * The loaded projects list */ projects: Project[]; + /** + * Allows consumers to force refetch project data. + */ + reloadProjects: () => Promise; } & Pick; type Options = { @@ -199,7 +203,8 @@ function useProjects({limit, slugs, orgId: propOrgId}: Options = {}) { limit, }); - const fetchedProjects = uniqBy([...store.projects, ...results], ({slug}) => slug); + // Note the order of uniqBy: we prioritize project data recently fetched over previously cached data + const fetchedProjects = uniqBy([...results, ...store.projects], ({slug}) => slug); ProjectsStore.loadInitialData(fetchedProjects); setState(prev => ({ @@ -308,6 +313,7 @@ function useProjects({limit, slugs, orgId: propOrgId}: Options = {}) { fetchError, hasMore, onSearch: handleSearch, + reloadProjects: loadProjectsBySlug, }; return result; diff --git a/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx b/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx index 8c53b748456c0..c72736e791d4d 100644 --- a/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx +++ 
b/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx @@ -168,6 +168,7 @@ const setupMocks = () => { initiallyLoaded: true, projects: [ProjectFixture({hasInsightsAssets: true})], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], }); }; diff --git a/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx b/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx index 52c2b26f31b65..151fb10053862 100644 --- a/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx +++ b/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx @@ -60,6 +60,7 @@ describe('PagePerformanceTable', function () { }), ], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx b/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx index 55caaab03a1d2..97cd80613804c 100644 --- a/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx +++ b/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx @@ -26,6 +26,7 @@ describe('WebVitalsLandingPage', function () { jest.mocked(useProjects).mockReturnValue({ projects: [ProjectFixture({hasInsightsVitals: true})], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx b/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx index 91646ebce390e..b8db986b5f208 100644 --- a/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx +++ b/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx @@ -71,6 +71,7 @@ describe('CacheLandingPage', function () { }), ], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: 
[], fetching: false, hasMore: null, @@ -302,6 +303,7 @@ describe('CacheLandingPage', function () { }), ], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/common/components/modulesOnboarding.spec.tsx b/static/app/views/insights/common/components/modulesOnboarding.spec.tsx index 0fdac56dd94f7..b1d8f1ea48ca1 100644 --- a/static/app/views/insights/common/components/modulesOnboarding.spec.tsx +++ b/static/app/views/insights/common/components/modulesOnboarding.spec.tsx @@ -26,6 +26,7 @@ describe('ModulesOnboarding', () => { jest.mocked(useProjects).mockReturnValue({ projects: [project], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, @@ -66,6 +67,7 @@ describe('ModulesOnboarding', () => { jest.mocked(useProjects).mockReturnValue({ projects: [project], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, @@ -105,6 +107,7 @@ describe('ModulesOnboarding', () => { jest.mocked(useProjects).mockReturnValue({ projects: [project], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/common/components/modulesOnboarding.tsx b/static/app/views/insights/common/components/modulesOnboarding.tsx index 7edbd0243ed9f..39d4415e83a51 100644 --- a/static/app/views/insights/common/components/modulesOnboarding.tsx +++ b/static/app/views/insights/common/components/modulesOnboarding.tsx @@ -1,4 +1,4 @@ -import {Fragment, useState} from 'react'; +import {Fragment, useEffect, useState} from 'react'; import styled from '@emotion/styled'; import startCase from 'lodash/startCase'; import {PlatformIcon} from 'platformicons'; @@ -21,6 +21,7 @@ import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {PlatformKey} from 'sentry/types/project'; import useOrganization from 
'sentry/utils/useOrganization'; +import useProjects from 'sentry/utils/useProjects'; import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout'; import type {TitleableModuleNames} from 'sentry/views/insights/common/components/modulePageProviders'; import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan'; @@ -42,8 +43,19 @@ export function ModulesOnboarding({ }) { const organization = useOrganization(); const onboardingProject = useOnboardingProject(); + const {reloadProjects} = useProjects(); const hasData = useHasFirstSpan(moduleName); + // Refetch the project metadata if the selected project does not have insights data, because + // we may have received insight data (and subsequently updated `Project.hasInsightxx`) + // after the initial project fetch. + useEffect(() => { + if (!hasData) { + reloadProjects(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [hasData]); + if (onboardingProject) { return ( diff --git a/static/app/views/insights/database/views/databaseLandingPage.spec.tsx b/static/app/views/insights/database/views/databaseLandingPage.spec.tsx index efe0cd118b15a..31595a29e08ff 100644 --- a/static/app/views/insights/database/views/databaseLandingPage.spec.tsx +++ b/static/app/views/insights/database/views/databaseLandingPage.spec.tsx @@ -22,6 +22,7 @@ describe('DatabaseLandingPage', function () { jest.mocked(useProjects).mockReturnValue({ projects: [ProjectFixture({hasInsightsDb: true})], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/http/views/httpLandingPage.spec.tsx b/static/app/views/insights/http/views/httpLandingPage.spec.tsx index 5d254c03a2749..d232a6ca70938 100644 --- a/static/app/views/insights/http/views/httpLandingPage.spec.tsx +++ b/static/app/views/insights/http/views/httpLandingPage.spec.tsx @@ -60,6 +60,7 @@ describe('HTTPLandingPage', function () { }), ], onSearch: 
jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx b/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx index 596ab581bb7fe..c5965687805af 100644 --- a/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx +++ b/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx @@ -39,6 +39,7 @@ function mockProjects(projects: Project[]) { hasMore: false, initiallyLoaded: false, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], projects, }); diff --git a/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx b/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx index 4a909fd70e1df..75e560676ff57 100644 --- a/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx +++ b/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx @@ -26,6 +26,7 @@ function mockResponses(organization, project) { hasMore: false, initiallyLoaded: false, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], projects: [project], }); diff --git a/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx b/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx index 92d626e0ae102..ad3310810ecf7 100644 --- a/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx +++ b/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx @@ -31,6 +31,7 @@ describe('PageloadModule', function () { hasMore: false, initiallyLoaded: false, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], projects: [project], }); diff --git a/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx b/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx index 
6922be5ba916e..f6c0af83557ba 100644 --- a/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx +++ b/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx @@ -46,6 +46,7 @@ describe('destinationSummaryPage', () => { jest.mocked(useProjects).mockReturnValue({ projects: [], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx b/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx index 5db67954157c7..66d70bec26d1a 100644 --- a/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx +++ b/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx @@ -50,6 +50,7 @@ describe('queuesLandingPage', () => { jest.mocked(useProjects).mockReturnValue({ projects: [project], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, diff --git a/static/app/views/monitors/components/monitorForm.spec.tsx b/static/app/views/monitors/components/monitorForm.spec.tsx index 739ac562746bb..11c6e2dd1eaaf 100644 --- a/static/app/views/monitors/components/monitorForm.spec.tsx +++ b/static/app/views/monitors/components/monitorForm.spec.tsx @@ -32,6 +32,7 @@ describe('MonitorForm', function () { hasMore: false, initiallyLoaded: false, onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], projects: [project], }); diff --git a/static/app/views/performance/content.tsx b/static/app/views/performance/content.tsx index d008d6acdb106..226860f36da78 100644 --- a/static/app/views/performance/content.tsx +++ b/static/app/views/performance/content.tsx @@ -49,7 +49,7 @@ type State = { function PerformanceContent({selection, location, demoMode, router}: Props) { const api = useApi(); const organization = useOrganization(); - const {projects} = useProjects(); + const {projects, reloadProjects} = useProjects(); const mounted = useRef(false); const previousDateTime 
= usePrevious(selection.datetime); const [state, setState] = useState({error: undefined}); @@ -107,6 +107,16 @@ function PerformanceContent({selection, location, demoMode, router}: Props) { tab: getLandingDisplayFromParam(location)?.field, }); + // Refetch the project metadata if the selected project does not have performance data, because + // we may have received performance data (and subsequently updated `Project.firstTransactionEvent`) + // after the initial project fetch. + useEffect(() => { + if (onboardingProject) { + reloadProjects(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [onboardingProject]); + useEffect(() => { if (!mounted.current) { loadOrganizationTags(api, organization.slug, selection); diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx index 7fe5a2948bae5..a9ab40dae9bd9 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx @@ -31,6 +31,7 @@ describe('SpanSummaryPage', function () { jest.mocked(useProjects).mockReturnValue({ projects: [], onSearch: jest.fn(), + reloadProjects: jest.fn(), placeholders: [], fetching: false, hasMore: null, From ceb7a44848e69dbb8fd29e4cee12df38006b0a39 Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:18:13 -0700 Subject: [PATCH 30/52] feat(issue-details): Update Event Level (#75296) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this pr streamlines the event level so it is no longer a colored circle, now it just states what the level is for the group/event. 
also removes the icon for unhandled ![Screenshot 2024-07-31 at 10 25 02 AM](https://github.com/user-attachments/assets/fb5b6a7d-c9ca-4db9-b43f-2e7afd7d4d07) --- static/app/components/events/errorLevel.tsx | 23 ++++++++++++++++++- static/app/components/events/eventMessage.tsx | 12 +++++++--- .../group/inboxBadges/unhandledTag.tsx | 4 +++- ...er.spec.tsx => streamlinedHeader.spec.tsx} | 7 ++++-- .../views/issueDetails/streamlinedHeader.tsx | 1 + 5 files changed, 40 insertions(+), 7 deletions(-) rename static/app/views/issueDetails/{updatedHeader.spec.tsx => streamlinedHeader.spec.tsx} (92%) diff --git a/static/app/components/events/errorLevel.tsx b/static/app/components/events/errorLevel.tsx index b010c54174546..86879c9aa5e94 100644 --- a/static/app/components/events/errorLevel.tsx +++ b/static/app/components/events/errorLevel.tsx @@ -1,21 +1,42 @@ +import {Fragment} from 'react'; import styled from '@emotion/styled'; +import Divider from 'sentry/components/events/interfaces/debugMeta/debugImageDetails/candidate/information/divider'; +import UnhandledTag from 'sentry/components/group/inboxBadges/unhandledTag'; import {Tooltip} from 'sentry/components/tooltip'; import {tct} from 'sentry/locale'; import type {Level} from 'sentry/types/event'; import {capitalize} from 'sentry/utils/string/capitalize'; +import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; const DEFAULT_SIZE = '13px'; type Props = { className?: string; level?: Level; + showUnhandled?: boolean; size?: string; }; -function ErrorLevel({className, level = 'unknown', size = '11px'}: Props) { +function ErrorLevel({className, showUnhandled, level = 'unknown', size = '11px'}: Props) { + const hasStreamlinedUI = useHasStreamlinedUI(); const levelLabel = tct('Level: [level]', {level: capitalize(level)}); + if (hasStreamlinedUI) { + return ( + + {showUnhandled ? 
( + + + + + ) : null} + {capitalize(level)} + + + ); + } + return ( diff --git a/static/app/components/events/eventMessage.tsx b/static/app/components/events/eventMessage.tsx index 288c7a462dbbd..4739883610356 100644 --- a/static/app/components/events/eventMessage.tsx +++ b/static/app/components/events/eventMessage.tsx @@ -5,6 +5,7 @@ import UnhandledTag from 'sentry/components/group/inboxBadges/unhandledTag'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {EventOrGroupType, type Level} from 'sentry/types/event'; +import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; type Props = { type: EventOrGroupType; @@ -31,9 +32,12 @@ function EventOrGroupLevel({ level, levelIndicatorSize, type, -}: Pick) { + showUnhandled, +}: Pick) { if (level && EVENT_TYPES_WITH_LOG_LEVEL.has(type)) { - return ; + return ( + + ); } return null; @@ -48,14 +52,16 @@ function EventMessage({ type, showUnhandled = false, }: Props) { + const hasStreamlinedUI = useHasStreamlinedUI(); return ( - {showUnhandled ? : null} + {showUnhandled && !hasStreamlinedUI ? : null} {message ? 
( {message} ) : ( diff --git a/static/app/components/group/inboxBadges/unhandledTag.tsx b/static/app/components/group/inboxBadges/unhandledTag.tsx index 120e07e7e6589..3e75586defa30 100644 --- a/static/app/components/group/inboxBadges/unhandledTag.tsx +++ b/static/app/components/group/inboxBadges/unhandledTag.tsx @@ -3,12 +3,14 @@ import styled from '@emotion/styled'; import {Tooltip} from 'sentry/components/tooltip'; import {IconFatal} from 'sentry/icons'; import {t} from 'sentry/locale'; +import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; function UnhandledTag() { + const hasStreamlinedUI = useHasStreamlinedUI(); return ( - + {!hasStreamlinedUI && } {t('Unhandled')} diff --git a/static/app/views/issueDetails/updatedHeader.spec.tsx b/static/app/views/issueDetails/streamlinedHeader.spec.tsx similarity index 92% rename from static/app/views/issueDetails/updatedHeader.spec.tsx rename to static/app/views/issueDetails/streamlinedHeader.spec.tsx index 5f5661d992141..b9be24350caa3 100644 --- a/static/app/views/issueDetails/updatedHeader.spec.tsx +++ b/static/app/views/issueDetails/streamlinedHeader.spec.tsx @@ -13,12 +13,12 @@ import {ReprocessingStatus} from 'sentry/views/issueDetails/utils'; describe('UpdatedGroupHeader', () => { const baseUrl = 'BASE_URL/'; - const organization = OrganizationFixture(); + const organization = OrganizationFixture({features: ['issue-details-streamline']}); const project = ProjectFixture({ platform: 'javascript', teams: [TeamFixture()], }); - const group = GroupFixture({issueCategory: IssueCategory.ERROR}); + const group = GroupFixture({issueCategory: IssueCategory.ERROR, isUnhandled: true}); describe('JS Project Error Issue', () => { const defaultProps = { @@ -81,6 +81,9 @@ describe('UpdatedGroupHeader', () => { expect(await screen.findByText('RequestError')).toBeInTheDocument(); + expect(await screen.findByText('Warning')).toBeInTheDocument(); + expect(await screen.findByText('Unhandled')).toBeInTheDocument(); + 
expect(await screen.findByText('First Seen in')).toBeInTheDocument(); expect(await screen.findByText('Last Seen in')).toBeInTheDocument(); diff --git a/static/app/views/issueDetails/streamlinedHeader.tsx b/static/app/views/issueDetails/streamlinedHeader.tsx index 969bb77120b0c..2c2a76700e6a6 100644 --- a/static/app/views/issueDetails/streamlinedHeader.tsx +++ b/static/app/views/issueDetails/streamlinedHeader.tsx @@ -117,6 +117,7 @@ export default function StreamlinedGroupHeader({ From 91b38136c814f0bccc420d2ceec1ab1ab2d85141 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 17:52:24 -0400 Subject: [PATCH 31/52] ref: delete unused JSONResponse (#75382) split out from a branch where I am strongly typing plugin configure views last referenced in f50e4601b2987de730243da79cb26ef3727ed2c4 --- src/sentry/plugins/base/response.py | 15 +-------------- tests/sentry/plugins/base/test_response.py | 11 ----------- 2 files changed, 1 insertion(+), 25 deletions(-) delete mode 100644 tests/sentry/plugins/base/test_response.py diff --git a/src/sentry/plugins/base/response.py b/src/sentry/plugins/base/response.py index 6b1d4c5b793b2..f17a5181a2336 100644 --- a/src/sentry/plugins/base/response.py +++ b/src/sentry/plugins/base/response.py @@ -1,9 +1,7 @@ -__all__ = ("Response", "JSONResponse") - from django.http import HttpResponse from django.template.context_processors import csrf -from sentry.utils import json +__all__ = ("Response",) class Response: @@ -26,14 +24,3 @@ def render(self, request, context=None): context.update(csrf(request)) return render_to_string(self.template, context, request) - - -class JSONResponse(Response): - def __init__(self, context, status=200): - self.context = context - self.status = status - - def respond(self, request, context=None): - return HttpResponse( - json.dumps(self.context), content_type="application/json", status=self.status - ) diff --git 
a/tests/sentry/plugins/base/test_response.py b/tests/sentry/plugins/base/test_response.py deleted file mode 100644 index 6ea9d81ecb029..0000000000000 --- a/tests/sentry/plugins/base/test_response.py +++ /dev/null @@ -1,11 +0,0 @@ -from sentry.plugins.base.response import JSONResponse - - -def test_json_response(): - resp = JSONResponse({}).respond(None) - assert resp.status_code == 200 - - -def test_json_response_with_status_kwarg(): - resp = JSONResponse({}, status=400).respond(None) - assert resp.status_code == 400 From ca43622c12f8c921ec6af21249f3047d4ff17395 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Wed, 31 Jul 2024 14:54:14 -0700 Subject: [PATCH 32/52] feat(toolbar): Allow setting overrides for UI feature flags via the toolbar (#75305) ![SCR-20240730-mbvy](https://github.com/user-attachments/assets/02c77306-1325-4fc5-9fbf-a157f5f4d42f) Fixes https://github.com/getsentry/sentry/issues/75161 --- static/app/actionCreators/organization.tsx | 3 + .../featureFlags/customOverride.tsx | 60 +++++ .../featureFlags/featureFlagItem.tsx | 98 +++++++++ .../featureFlags/featureFlagsContext.tsx | 74 +++++++ .../featureFlags/featureFlagsPanel.tsx | 207 ++++++++++++------ .../featureFlags/useEnabledFeatureFlags.tsx | 6 - .../devtoolbar/hooks/useConfiguration.tsx | 3 +- static/app/components/devtoolbar/types.ts | 12 +- static/app/utils/featureFlagOverrides.spec.ts | 161 ++++++++++++++ static/app/utils/featureFlagOverrides.ts | 118 ++++++++++ static/app/utils/useDevToolbar.tsx | 20 +- 11 files changed, 683 insertions(+), 79 deletions(-) create mode 100644 static/app/components/devtoolbar/components/featureFlags/customOverride.tsx create mode 100644 static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx create mode 100644 static/app/components/devtoolbar/components/featureFlags/featureFlagsContext.tsx delete mode 100644 static/app/components/devtoolbar/components/featureFlags/useEnabledFeatureFlags.tsx create mode 100644 
static/app/utils/featureFlagOverrides.spec.ts create mode 100644 static/app/utils/featureFlagOverrides.ts diff --git a/static/app/actionCreators/organization.tsx b/static/app/actionCreators/organization.tsx index a6b50540408d8..960bbd1899519 100644 --- a/static/app/actionCreators/organization.tsx +++ b/static/app/actionCreators/organization.tsx @@ -14,6 +14,7 @@ import ProjectsStore from 'sentry/stores/projectsStore'; import TeamStore from 'sentry/stores/teamStore'; import type {Organization, Team} from 'sentry/types/organization'; import type {Project} from 'sentry/types/project'; +import FeatureFlagOverrides from 'sentry/utils/featureFlagOverrides'; import {getPreloadedDataPromise} from 'sentry/utils/getPreloadedData'; import parseLinkHeader from 'sentry/utils/parseLinkHeader'; @@ -39,6 +40,8 @@ async function fetchOrg( throw new Error('retrieved organization is falsey'); } + FeatureFlagOverrides.singleton().loadOrg(org); + OrganizationStore.onUpdate(org, {replace: true}); setActiveOrganization(org); diff --git a/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx b/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx new file mode 100644 index 0000000000000..9369615126c03 --- /dev/null +++ b/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx @@ -0,0 +1,60 @@ +import {useContext, useState} from 'react'; + +import {Button} from 'sentry/components/button'; +import Input from 'sentry/components/input'; +import Switch from 'sentry/components/switchButton'; +import {IconAdd} from 'sentry/icons'; + +import useConfiguration from '../../hooks/useConfiguration'; +import {AnalyticsContext} from '../analyticsProvider'; + +import {useFeatureFlagsContext} from './featureFlagsContext'; + +export default function CustomOverride() { + const {eventName, eventKey} = useContext(AnalyticsContext); + const {trackAnalytics} = useConfiguration(); + const {setOverride} = useFeatureFlagsContext(); + + const [name, 
setName] = useState(''); + const [isActive, setIsActive] = useState(false); + + return ( +
{ + e.preventDefault(); + setOverride(name, isActive); + setName(''); + setIsActive(false); + trackAnalytics?.({ + eventKey: eventKey + '.created', + eventName: eventName + ' created', + }); + }} + > + setName(e.target.value.toLowerCase())} + /> + { + setIsActive(!isActive); + }} + /> + + + ); +} diff --git a/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx b/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx new file mode 100644 index 0000000000000..fbe1c5b3f0dba --- /dev/null +++ b/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx @@ -0,0 +1,98 @@ +import {Fragment, useContext, useState} from 'react'; + +import AnalyticsProvider, { + AnalyticsContext, +} from 'sentry/components/devtoolbar/components/analyticsProvider'; +import ExternalLink from 'sentry/components/links/externalLink'; +import {Cell} from 'sentry/components/replays/virtualizedGrid/bodyCell'; +import Switch from 'sentry/components/switchButton'; + +import useConfiguration from '../../hooks/useConfiguration'; +import {inlineLinkCss} from '../../styles/link'; +import {panelInsetContentCss} from '../../styles/panel'; +import {smallCss} from '../../styles/typography'; +import type {FlagValue} from '../../types'; + +import {useFeatureFlagsContext} from './featureFlagsContext'; + +type FeatureFlag = {name: string; override: FlagValue; value: FlagValue}; + +export default function FeatureFlagItem({flag}: {flag: FeatureFlag}) { + const {featureFlags} = useConfiguration(); + + return ( + + + {featureFlags?.urlTemplate?.(flag.name) ? ( + + {flag.name} + + ) : ( + {flag.name} + )} + + + + + + ); +} + +function FlagValueInput({flag}: {flag: FeatureFlag}) { + if ( + typeof flag.value === 'boolean' || + flag.override === true || + flag.override === false + ) { + return ( + + + + ); + } + + return ( + + {flag.override !== undefined ? 
String(flag.override) : String(flag.value)} + + ); +} + +function FlagValueBooleanInput({flag}: {flag: FeatureFlag}) { + const {eventName, eventKey} = useContext(AnalyticsContext); + const {trackAnalytics} = useConfiguration(); + const {setOverride} = useFeatureFlagsContext(); + + const [isActive, setIsActive] = useState( + flag.override !== undefined ? Boolean(flag.override) : Boolean(flag.value) + ); + + return ( + + ); +} diff --git a/static/app/components/devtoolbar/components/featureFlags/featureFlagsContext.tsx b/static/app/components/devtoolbar/components/featureFlags/featureFlagsContext.tsx new file mode 100644 index 0000000000000..6bff7124aee06 --- /dev/null +++ b/static/app/components/devtoolbar/components/featureFlags/featureFlagsContext.tsx @@ -0,0 +1,74 @@ +import type {ReactNode} from 'react'; +import {createContext, useCallback, useContext, useState} from 'react'; + +import useConfiguration from 'sentry/components/devtoolbar/hooks/useConfiguration'; +import type {FeatureFlagMap, FlagValue} from 'sentry/components/devtoolbar/types'; + +interface Context { + /** + * Call through to the user-supplied clearOverrides() function to reset override state. + */ + clearOverrides: () => void; + + /** + * The map of effective feature flags. + */ + featureFlagMap: FeatureFlagMap; + + /** + * Whether the state of overridden flags has changed in this session. After + * state is changed you must reload the page to ensure that you're getting a + * consistent experience. + */ + isDirty: boolean; + + /** + * Set an override. Marks the state as dirty. + * + * Setting an override back to default will not un-mark the dirty flag. 
+ */ + setOverride: (name: string, value: FlagValue) => void; +} + +const FeatureFlagContext = createContext({ + clearOverrides: () => {}, + featureFlagMap: {}, + isDirty: false, + setOverride: () => {}, +}); + +export function FeatureFlagsContextProvider({children}: {children: ReactNode}) { + const {featureFlags} = useConfiguration(); + + const [isDirty, setIsDirty] = useState(false); + const [featureFlagMap, setFeatureFlagMap] = useState( + () => featureFlags?.getFeatureFlagMap?.() ?? {} + ); + + const setOverride = useCallback( + (name: string, value: FlagValue) => { + featureFlags?.setOverrideValue?.(name, value); + setIsDirty(true); + setFeatureFlagMap(featureFlags?.getFeatureFlagMap?.() ?? {}); + }, + [featureFlags] + ); + + const clearOverrides = useCallback(() => { + featureFlags?.clearOverrides?.(); + setIsDirty(true); + setFeatureFlagMap(featureFlags?.getFeatureFlagMap?.() ?? {}); + }, [featureFlags]); + + return ( + + {children} + + ); +} + +export function useFeatureFlagsContext() { + return useContext(FeatureFlagContext); +} diff --git a/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx b/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx index b437c18292088..9045f2a2bea40 100644 --- a/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx +++ b/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx @@ -1,82 +1,159 @@ -import {useRef, useState} from 'react'; +import {type Dispatch, Fragment, type SetStateAction, useState} from 'react'; -import AnalyticsProvider from 'sentry/components/devtoolbar/components/analyticsProvider'; -import useEnabledFeatureFlags from 'sentry/components/devtoolbar/components/featureFlags/useEnabledFeatureFlags'; -import {inlineLinkCss} from 'sentry/components/devtoolbar/styles/link'; -import EmptyStateWarning from 'sentry/components/emptyStateWarning'; +import {Button} from 'sentry/components/button'; import Input from 
'sentry/components/input'; -import ExternalLink from 'sentry/components/links/externalLink'; import {PanelTable} from 'sentry/components/panels/panelTable'; -import {Cell} from 'sentry/components/replays/virtualizedGrid/bodyCell'; +import {SegmentedControl} from 'sentry/components/segmentedControl'; -import useConfiguration from '../../hooks/useConfiguration'; import {panelInsetContentCss, panelSectionCss} from '../../styles/panel'; import {smallCss} from '../../styles/typography'; +import AnalyticsProvider from '../analyticsProvider'; import PanelLayout from '../panelLayout'; +import CustomOverride from './customOverride'; +import FeatureFlagItem from './featureFlagItem'; +import {FeatureFlagsContextProvider, useFeatureFlagsContext} from './featureFlagsContext'; + +type Prefilter = 'all' | 'overrides'; + export default function FeatureFlagsPanel() { - const featureFlags = useEnabledFeatureFlags(); - const {organizationSlug, featureFlagTemplateUrl} = useConfiguration(); + const [prefilter, setPrefilter] = useState('all'); const [searchTerm, setSearchTerm] = useState(''); - const searchInput = useRef(null); - const filteredItems = featureFlags - ?.filter(s => s.toLowerCase().includes(searchTerm)) - .sort(); + return ( + + +
+ +
+ +
+
+
+ + + +
+
+
+ + + +
+
+
+
+ ); +} + +function IsDirtyMessage() { + const {isDirty} = useFeatureFlagsContext(); + + return isDirty ? ( +
+ Reload to see changes +
+ ) : ( +
+ ); +} +function Filters({ + setPrefilter, + prefilter, + setSearchTerm, +}: { + prefilter: Prefilter; + setPrefilter: Dispatch>; + setSearchTerm: Dispatch>; +}) { + const {clearOverrides} = useFeatureFlagsContext(); return ( - -
- - Flags enabled for {organizationSlug} - + +
+ onChange={setPrefilter} size="xs" value={prefilter}> + All Flags + Overrides Only +
- - setSearchTerm(e.target.value.toLowerCase())} - /> -
, - ]} - stickyHeaders - css={[ - { - border: 'none', - padding: 0, - '&>:first-child': { - minHeight: 'unset', - padding: 'var(--space50) var(--space150)', - }, - }, - ]} + + setSearchTerm(e.target.value.toLowerCase())} + placeholder="Search flags" + size="xs" + /> + + ); +} + +function FlagTable({prefilter, searchTerm}: {prefilter: string; searchTerm: string}) { + const {featureFlagMap} = useFeatureFlagsContext(); + + const filtered = Object.fromEntries( + Object.entries(featureFlagMap)?.filter(([name, {value, override}]) => { + const overrideOnly = prefilter === 'overrides'; + const isOverridden = override !== undefined && value !== override; + const matchesSearch = name + .toLocaleLowerCase() + .includes(searchTerm.toLocaleLowerCase()); + return overrideOnly ? isOverridden && matchesSearch : matchesSearch; + }) + ); + const names = Object.keys(filtered).sort(); + + return ( + :first-child': { + minHeight: 'unset', + padding: 'var(--space50) var(--space150)', + }, + }, + ]} + headers={[ + Name, + Value, + ]} + stickyHeaders + > + {names?.map(name => ( + + + + ))} + ); } diff --git a/static/app/components/devtoolbar/components/featureFlags/useEnabledFeatureFlags.tsx b/static/app/components/devtoolbar/components/featureFlags/useEnabledFeatureFlags.tsx deleted file mode 100644 index bb1370c7bd71d..0000000000000 --- a/static/app/components/devtoolbar/components/featureFlags/useEnabledFeatureFlags.tsx +++ /dev/null @@ -1,6 +0,0 @@ -import useConfiguration from 'sentry/components/devtoolbar/hooks/useConfiguration'; - -export default function useEnabledFeatureFlags() { - const {featureFlags} = useConfiguration(); - return featureFlags; -} diff --git a/static/app/components/devtoolbar/hooks/useConfiguration.tsx b/static/app/components/devtoolbar/hooks/useConfiguration.tsx index 72ea5be427cc1..f8c085283a074 100644 --- a/static/app/components/devtoolbar/hooks/useConfiguration.tsx +++ b/static/app/components/devtoolbar/hooks/useConfiguration.tsx @@ -5,13 +5,12 @@ import 
type {Configuration} from '../types'; const context = createContext({ apiPrefix: '', environment: ['production'], + featureFlags: {}, organizationSlug: '', placement: 'right-edge', projectId: 0, projectPlatform: '', projectSlug: '', - featureFlags: [], - featureFlagTemplateUrl: undefined, }); export function ConfigurationContextProvider({ diff --git a/static/app/components/devtoolbar/types.ts b/static/app/components/devtoolbar/types.ts index 1255bb7969c83..05391bb4d9c06 100644 --- a/static/app/components/devtoolbar/types.ts +++ b/static/app/components/devtoolbar/types.ts @@ -2,6 +2,9 @@ import type SentrySDK from '@sentry/react'; // TODO: change to `@sentry/browser` export type {FeedbackIssueListItem} from 'sentry/utils/feedback/types'; +export type FlagValue = boolean | string | number | undefined; +export type FeatureFlagMap = Record; + export type Configuration = { apiPrefix: string; environment: string | string[]; @@ -12,8 +15,13 @@ export type Configuration = { projectSlug: string; SentrySDK?: typeof SentrySDK; domId?: string; - featureFlagTemplateUrl?: undefined | ((flag: string) => string | undefined); - featureFlags?: string[]; + featureFlags?: { + clearOverrides?: () => void; + getFeatureFlagMap?: () => FeatureFlagMap; + setOverrideValue?: (name: string, override: FlagValue) => void; + urlTemplate?: (name: string) => string | undefined; + }; + trackAnalytics?: (props: {eventKey: string; eventName: string}) => void; }; diff --git a/static/app/utils/featureFlagOverrides.spec.ts b/static/app/utils/featureFlagOverrides.spec.ts new file mode 100644 index 0000000000000..156736974cf60 --- /dev/null +++ b/static/app/utils/featureFlagOverrides.spec.ts @@ -0,0 +1,161 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; + +import FeatureFlagOverrides from 'sentry/utils/featureFlagOverrides'; +import localStorageWrapper from 'sentry/utils/localStorage'; + +const LOCALSTORAGE_KEY = 'feature-flag-overrides'; + +describe('FeatureFlagOverrides', () => { + 
let organization; + beforeEach(() => { + localStorage.clear(); + + organization = OrganizationFixture({ + features: ['enable-issues', 'enable-profiling', 'enable-replay'], + }); + }); + + describe('setStoredOverride', () => { + it('should insert new flag names into localstorage', () => { + expect(localStorageWrapper.getItem(LOCALSTORAGE_KEY)).toBeNull(); + const inst = new FeatureFlagOverrides(); + + inst.setStoredOverride('enable-issues', false); + expect(localStorageWrapper.getItem(LOCALSTORAGE_KEY)).toBe( + '{"enable-issues":false}' + ); + + inst.setStoredOverride('enable-issues', true); + expect(localStorageWrapper.getItem(LOCALSTORAGE_KEY)).toBe( + '{"enable-issues":true}' + ); + }); + + it('should preserve other flag overrides in localstorage', () => { + localStorageWrapper.setItem( + LOCALSTORAGE_KEY, + '{"enable-issues":true,"enable-profiling":false}' + ); + const inst = new FeatureFlagOverrides(); + + inst.setStoredOverride('enable-replay', false); + expect(localStorageWrapper.getItem(LOCALSTORAGE_KEY)).toEqual( + '{"enable-issues":true,"enable-profiling":false,"enable-replay":false}' + ); + + inst.setStoredOverride('enable-replay', true); + expect(localStorageWrapper.getItem(LOCALSTORAGE_KEY)).toEqual( + '{"enable-issues":true,"enable-profiling":false,"enable-replay":true}' + ); + }); + + it('should set localstorage, even if the original value is malformed', () => { + localStorageWrapper.setItem(LOCALSTORAGE_KEY, 'this is not an object {}'); + const inst = new FeatureFlagOverrides(); + + inst.setStoredOverride('enable-issues', false); + expect(localStorageWrapper.getItem(LOCALSTORAGE_KEY)).toEqual( + '{"enable-issues":false}' + ); + }); + }); + + describe('getFeatureFlagMap', () => { + it('should combine & remove features that are disabled locally', () => { + localStorageWrapper.setItem( + LOCALSTORAGE_KEY, + '{"enable-issues":false,"enable-profiling":true}' + ); + const inst = new FeatureFlagOverrides(); + + 
expect(inst.getFeatureFlagMap(organization)).toEqual({ + 'enable-issues': {value: true, override: false}, + 'enable-profiling': {value: true, override: true}, + 'enable-replay': {value: true, override: undefined}, + }); + }); + + it('should combine & add features that are listed locally, but not in the org', () => { + localStorageWrapper.setItem( + LOCALSTORAGE_KEY, + '{"enable-issues":false,"secret-new-feature":true,"local-only-feature":false}' + ); + const inst = new FeatureFlagOverrides(); + + expect(inst.getFeatureFlagMap(organization)).toEqual({ + 'enable-issues': {value: true, override: false}, + 'enable-profiling': {value: true, override: undefined}, + 'enable-replay': {value: true, override: undefined}, + 'secret-new-feature': {value: undefined, override: true}, + 'local-only-feature': {value: undefined, override: false}, + }); + }); + }); + + describe('getEnabledFeatureFlagList', () => { + it('should combine & remove features that are disabled locally', () => { + localStorageWrapper.setItem( + LOCALSTORAGE_KEY, + '{"enable-issues":false,"enable-profiling":true}' + ); + const inst = new FeatureFlagOverrides(); + + expect(inst.getEnabledFeatureFlagList(organization)).toEqual([ + 'enable-profiling', + 'enable-replay', + ]); + }); + + it('should combine & add features that are listed locally, but not in the org', () => { + localStorageWrapper.setItem( + LOCALSTORAGE_KEY, + '{"enable-issues":false,"secret-new-feature":true,"local-only-feature":false}' + ); + const inst = new FeatureFlagOverrides(); + + expect(inst.getEnabledFeatureFlagList(organization)).toEqual([ + 'enable-profiling', + 'enable-replay', + 'secret-new-feature', + ]); + }); + }); + + describe('loadOrg', () => { + it('should override the features on an org with the combined list', () => { + localStorageWrapper.setItem( + LOCALSTORAGE_KEY, + '{"enable-issues":false,"secret-new-feature":true,"local-only-feature":false}' + ); + const inst = new FeatureFlagOverrides(); + + 
expect(organization.features).toEqual([ + 'enable-issues', + 'enable-profiling', + 'enable-replay', + ]); + + inst.loadOrg(organization); + + expect(organization.features).toEqual([ + 'enable-profiling', + 'enable-replay', + 'secret-new-feature', + ]); + + expect(inst.getFeatureFlagMap(organization)).toEqual({ + 'enable-issues': {value: true, override: false}, + 'enable-profiling': {value: true, override: undefined}, + 'enable-replay': {value: true, override: undefined}, + 'secret-new-feature': {value: undefined, override: true}, + 'local-only-feature': {value: undefined, override: false}, + }); + + expect(inst.getEnabledFeatureFlagList(organization)).toEqual([ + 'enable-profiling', + 'enable-replay', + 'secret-new-feature', + ]); + }); + }); +}); diff --git a/static/app/utils/featureFlagOverrides.ts b/static/app/utils/featureFlagOverrides.ts new file mode 100644 index 0000000000000..55dff4e784b8e --- /dev/null +++ b/static/app/utils/featureFlagOverrides.ts @@ -0,0 +1,118 @@ +import type {Organization} from 'sentry/types/organization'; +import localStorageWrapper from 'sentry/utils/localStorage'; + +type OverrideState = Record; + +// TODO(ryan953): this should import from the devtoolbar definition +type FlagValue = boolean | string | number | undefined; +export type FeatureFlagMap = Record; + +const LOCALSTORAGE_KEY = 'feature-flag-overrides'; + +let __SINGLETON: FeatureFlagOverrides | null = null; + +export default class FeatureFlagOverrides { + /** + * Return the same instance of FeatureFlagOverrides in each part of the app. + * + * Multiple instances of FeatureFlagOverrides are needed by tests only. + */ + public static singleton() { + if (!__SINGLETON) { + __SINGLETON = new FeatureFlagOverrides(); + } + return __SINGLETON; + } + + /** + * Instead of storing the original & overridden values on the org itself we're + * using this cache instead. 
Having the cache on the side means we don't need + * to change the Organization type to add a pr + */ + private _originalValues = new WeakMap(); + + /** + * Set an override value into localStorage, so that the next time the page + * loads we can read it and apply it to the org. + */ + public setStoredOverride(name: string, value: boolean): void { + try { + const prev = this._getStoredOverrides(); + const updated: OverrideState = {...prev, [name]: value}; + localStorageWrapper.setItem(LOCALSTORAGE_KEY, JSON.stringify(updated)); + } catch { + // + } + } + + public clear(): void { + localStorageWrapper.setItem(LOCALSTORAGE_KEY, '{}'); + } + + private _getStoredOverrides(): OverrideState { + try { + return JSON.parse(localStorageWrapper.getItem(LOCALSTORAGE_KEY) ?? '{}'); + } catch { + return {}; + } + } + + /** + * Convert the list of enabled org-features into a FeatureFlapMap and cache it + * This cached list is only the original values that the server told us, but + * in a format we can add overrides to later. 
+ */ + private _getNonOverriddenFeatures(organization: Organization): FeatureFlagMap { + if (this._originalValues.has(organization)) { + // @ts-expect-error: We just checked .has(), so it shouldn't be undefined + return this._originalValues.get(organization); + } + + const nonOverriddenFeatures = Object.fromEntries( + organization.features.map(name => [name, {value: true, override: undefined}]) + ); + this._originalValues.set(organization, nonOverriddenFeatures); + return nonOverriddenFeatures; + } + + /** + * Return the effective featureFlags as a map, for the toolbar + */ + public getFeatureFlagMap(organization: Organization): FeatureFlagMap { + const nonOverriddenFeatures = this._getNonOverriddenFeatures(organization); + const overrides = this._getStoredOverrides(); + + const clone: FeatureFlagMap = {...nonOverriddenFeatures}; + + for (const [name, override] of Object.entries(overrides)) { + clone[name] = {value: clone[name]?.value, override}; + } + return clone; + } + + /** + * Return the effective featureFlags as an array, for `organization.features` + */ + public getEnabledFeatureFlagList(organization: Organization): string[] { + const nonOverriddenFeatures = this._getNonOverriddenFeatures(organization); + const overrides = this._getStoredOverrides(); + + const names = new Set(Object.keys(nonOverriddenFeatures)); + + for (const [name, override] of Object.entries(overrides)) { + if (override) { + names.add(name); + } else { + names.delete(name); + } + } + return Array.from(names); + } + + /** + * Stash the original list of features & override organization.features with the effective list of features + */ + public loadOrg(organization: Organization) { + organization.features = this.getEnabledFeatureFlagList(organization); + } +} diff --git a/static/app/utils/useDevToolbar.tsx b/static/app/utils/useDevToolbar.tsx index bc4724ff3e89e..beb2c345a3f79 100644 --- a/static/app/utils/useDevToolbar.tsx +++ b/static/app/utils/useDevToolbar.tsx @@ -3,6 +3,7 @@ import * as 
Sentry from '@sentry/react'; import DevToolbar from 'sentry/components/devtoolbar'; import {rawTrackAnalyticsEvent} from 'sentry/utils/analytics'; +import FeatureFlagOverrides from 'sentry/utils/featureFlagOverrides'; import useOrganization from 'sentry/utils/useOrganization'; import {useUser} from 'sentry/utils/useUser'; @@ -28,10 +29,21 @@ export default function useDevToolbar({enabled}: {enabled: boolean}) { projectId: 11276, projectPlatform: 'javascript', projectSlug: 'javascript', - featureFlags: organization.features, - featureFlagTemplateUrl: flag => - `https://github.com/search?q=repo%3Agetsentry%2Fsentry-options-automator+OR+repo%3Agetsentry%2Fsentry+${flag}&type=code`, - + featureFlags: { + getFeatureFlagMap: () => + FeatureFlagOverrides.singleton().getFeatureFlagMap(organization), + urlTemplate: flag => + `https://github.com/search?q=repo%3Agetsentry%2Fsentry-options-automator+OR+repo%3Agetsentry%2Fsentry+${flag}&type=code`, + setOverrideValue: (name, value) => { + // only boolean flags in sentry + if (typeof value === 'boolean') { + FeatureFlagOverrides.singleton().setStoredOverride(name, value); + } + }, + clearOverrides: () => { + FeatureFlagOverrides.singleton().clear(); + }, + }, trackAnalytics: (props: {eventKey: string; eventName: string}) => rawTrackAnalyticsEvent({...props, email, organization}), }); From 072e2b3c552edaaae1389fc753c8a31c2033e6cc Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 31 Jul 2024 15:07:05 -0700 Subject: [PATCH 33/52] chore(utils): Remove unused `with_circuit_breaker` (#75373) Now that calls to the Seer similarity service use the new `CircuitBreaker` class[1], `with_circuit_breaker` is unused, and it and its tests can be removed. 
[1] https://github.com/getsentry/sentry/pull/74898 --- src/sentry/utils/circuit_breaker.py | 116 +----------------- tests/sentry/utils/test_circuit_breaker.py | 134 --------------------- 2 files changed, 1 insertion(+), 249 deletions(-) diff --git a/src/sentry/utils/circuit_breaker.py b/src/sentry/utils/circuit_breaker.py index f03b5d84cc3f3..b867685626369 100644 --- a/src/sentry/utils/circuit_breaker.py +++ b/src/sentry/utils/circuit_breaker.py @@ -3,17 +3,13 @@ the `CircuitBreaker` class found in `circuit_breaker2.py` instead. """ -from collections.abc import Callable -from typing import ParamSpec, TypedDict, TypeVar +from typing import TypedDict from django.core.cache import cache from sentry import ratelimits as ratelimiter from sentry.utils import metrics -# TODO: Right now this circuit breaker is based on count of consecutive errors. We should consider -# whether basing it on percentage of failed requests would be better. - DEFAULT_ERROR_LIMIT = 30 ERROR_COUNT_CACHE_KEY = lambda key: f"circuit_breaker:{key}-error-count" PASSTHROUGH_RATELIMIT_KEY = lambda key: f"circuit_breaker:{key}-passthrough" @@ -24,10 +20,6 @@ class CircuitBreakerPassthrough(TypedDict, total=True): window: int -class CircuitBreakerTripped(Exception): - pass - - class CircuitBreakerConfig(TypedDict, total=False): # The number of consecutive failures within a given window required to trigger the circuit breaker error_limit: int @@ -52,12 +44,6 @@ class CircuitBreakerConfig(TypedDict, total=False): passthrough_attempts_per_interval=1, ) -# TODO: Once we're on python 3.12, we can get rid of these and change the first line of the -# signature of `with_circuit_breaker` to -# def with_circuit_breaker[T, **P]( -P = ParamSpec("P") -T = TypeVar("T") - def circuit_breaker_activated( key: str, @@ -86,103 +72,3 @@ def circuit_breaker_activated( metrics.incr(f"circuit_breaker.{key}.throttled") return True # blocked - - -def with_circuit_breaker( - key: str, - callback: Callable[P, T], - custom_config: 
CircuitBreakerConfig | None = None, -) -> T: - """ - Attempts to call the given callback, subject to a circuit breaker which will prevent the - callback from being called if has previously errored too many times in a row. - - If the breaker has been tripped, raises a `CircuitBreakerTripped` exception. If the callback is - called, and errors, increments the error count before allowing the error to bubble up to this - function's caller. Otherwise, simply returns the callback's result. - - Can optionally allow a subset of requests to bypass the circuit breaker, as a way to determine - whether the service has recovered. Once one of these requests succeeds, the circuit breaker will - be reset to its untripped state and the error count will be reset to 0. - - Note: The callback MUST NOT handle and then silently swallow exceptions, or else they won't - count towards the ciruit-breaking. In other words, this function should be used - along with an - `except CircuitBreakerTripped` block - inside the try-except wrapping the callback call: - - try: - with_circuit_breaker("fire", play_with_fire, config) - # or, if the callback takes arguments: - # with_circuit_breaker("fire", lambda: play_with_fire(fuel_type="wood"), config) - except CircuitBreakerTripped: - logger.log("Once burned, twice shy. No playing with fire for you today. Try again tomorrow.") - except BurnException: - logger.log("Ouch!") - - The threshold for tripping the circuit breaker and whether to allow bypass requests (and if so, - how many) can be set in the `config` argument. See the `CircuitBreakerConfig` class and - `CIRCUIT_BREAKER_DEFAULTS`. 
- """ - config: CircuitBreakerConfig = {**CIRCUIT_BREAKER_DEFAULTS, **(custom_config or {})} - error_count_key = ERROR_COUNT_CACHE_KEY(key) - - if _should_call_callback(key, error_count_key, config): - return _call_callback(error_count_key, config["error_limit_window"], callback) - else: - raise CircuitBreakerTripped - - -def _should_call_callback( - key: str, - error_count_key: str, - config: CircuitBreakerConfig, -) -> bool: - error_count = _get_or_set_error_count(error_count_key, config["error_limit_window"]) - if error_count < config["error_limit"]: - return True - - # Limit has been exceeded, check if we should allow any requests to pass through - if config["allow_passthrough"]: - should_bypass = not ratelimiter.backend.is_limited( - PASSTHROUGH_RATELIMIT_KEY(key), - limit=config["passthrough_attempts_per_interval"], - window=config["passthrough_interval"], - ) - if should_bypass: - metrics.incr(f"circuit_breaker.{key}.bypassed") - return True - - metrics.incr(f"circuit_breaker.{key}.throttled") - return False - - -def _call_callback(error_count_key: str, error_limit_window: int, callback: Callable[P, T]) -> T: - try: - result = callback() - except Exception: - _update_error_count(error_count_key, error_limit_window) - raise - else: - _update_error_count(error_count_key, error_limit_window, reset=True) - return result - - -def _update_error_count( - error_count_key: str, - error_limit_window: int, - reset: bool = False, -) -> None: - """ - Increment the count at the given key, unless `reset` is True, in which case, reset the count to 0. 
- """ - if reset: - new_count = 0 - else: - new_count = _get_or_set_error_count(error_count_key, error_limit_window) + 1 - - cache.set(error_count_key, new_count, error_limit_window) - - -def _get_or_set_error_count(error_count_key: str, error_limit_window: int) -> int: - error_count = cache.get_or_set(error_count_key, default=0, timeout=error_limit_window) - assert error_count is not None - return error_count diff --git a/tests/sentry/utils/test_circuit_breaker.py b/tests/sentry/utils/test_circuit_breaker.py index d9197a23b1f70..48543d56ada13 100644 --- a/tests/sentry/utils/test_circuit_breaker.py +++ b/tests/sentry/utils/test_circuit_breaker.py @@ -1,17 +1,13 @@ import time from unittest.mock import MagicMock, patch -import pytest from django.core.cache import cache from sentry.testutils.cases import TestCase from sentry.utils.circuit_breaker import ( ERROR_COUNT_CACHE_KEY, - CircuitBreakerConfig, CircuitBreakerPassthrough, - CircuitBreakerTripped, circuit_breaker_activated, - with_circuit_breaker, ) @@ -43,133 +39,3 @@ def test_passthrough(self, mock_metrics: MagicMock): time.sleep(1) assert not circuit_breaker_activated(self.key, self.error_limit, self.passthrough_data) mock_metrics.assert_called_with(f"circuit_breaker.{self.key}.bypassed") - - -class FailedToFetchError(Exception): - pass - - -class WithCircuitBreakerTest(TestCase): - def setUp(self): - self.key = "with_circuit_breaker_test" - self.error_limit = 2 - self.error_limit_window = 3 - self.config = CircuitBreakerConfig( - error_limit=self.error_limit, - error_limit_window=self.error_limit_window, - allow_passthrough=False, - passthrough_interval=2, - passthrough_attempts_per_interval=1, - ) - self.error_count_key = ERROR_COUNT_CACHE_KEY(self.key) - self.callback = MagicMock(wraps=lambda: "Dogs are great!") - self.erroring_callback = MagicMock( - side_effect=FailedToFetchError("Charlie didn't bring the ball back.") - ) - - def test_calls_callback_if_no_errors(self): - assert 
cache.get_or_set(self.error_count_key, default=0) == 0 - - result = with_circuit_breaker(self.key, self.callback, self.config) - - assert self.callback.call_count == 1 - assert result == "Dogs are great!" - - def test_calls_callback_if_not_too_many_errors(self): - cache.set(self.error_count_key, self.error_limit - 1) - - result = with_circuit_breaker(self.key, self.callback, self.config) - - assert self.callback.call_count == 1 - assert result == "Dogs are great!" - - @patch("sentry.utils.circuit_breaker.metrics.incr") - def test_prevents_next_request_if_breaker_is_tripped(self, mock_metrics_incr: MagicMock): - cache.set(self.error_count_key, self.error_limit - 1) - - # The breaker hasn't been tripped yet, so the callback's error bubbles up - with pytest.raises(FailedToFetchError): - with_circuit_breaker(self.key, self.erroring_callback, self.config) - - assert self.erroring_callback.call_count == 1 - assert cache.get(self.error_count_key) == self.error_limit - assert mock_metrics_incr.call_count == 0 - - # Now the breaker has been flipped, so we get a circuit breaker error instead - with pytest.raises(CircuitBreakerTripped): - with_circuit_breaker(self.key, self.erroring_callback, self.config) - - assert self.erroring_callback.call_count == 1 # hasn't increased - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.throttled") - - @patch("sentry.utils.circuit_breaker.metrics.incr") - def test_obeys_passthrough_config(self, mock_metrics_incr: MagicMock): - cache.set(self.error_count_key, self.error_limit) - - # The passthrough is off by default, so the request is blocked and we get the circuit - # breaker error - with pytest.raises(CircuitBreakerTripped): - with_circuit_breaker(self.key, self.erroring_callback, self.config) - - assert self.erroring_callback.call_count == 0 - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.throttled") - - # Allowing passthrough causes the request to go through, so we get the callback's error this time 
- self.config["allow_passthrough"] = True - with pytest.raises(FailedToFetchError): - with_circuit_breaker(self.key, self.erroring_callback, self.config) - - assert self.erroring_callback.call_count == 1 - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.bypassed") - - # According to our config (see `setUp`), even with passthrough on, we only get one attempt - # every two seconds, so now we're back to getting the circuit breaker error - with pytest.raises(CircuitBreakerTripped): - with_circuit_breaker(self.key, self.erroring_callback, self.config) - - assert self.erroring_callback.call_count == 1 # hasn't increased - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.throttled") - - # But if we wait the requisite two seconds, we're allowed another attempt, and we get the - # callback's error again - time.sleep(2) - with pytest.raises(FailedToFetchError): - with_circuit_breaker(self.key, self.erroring_callback, self.config) - - assert self.erroring_callback.call_count == 2 - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.bypassed") - - @patch("sentry.utils.circuit_breaker.metrics.incr") - def test_resets_on_successful_request(self, mock_metrics_incr: MagicMock): - cache.set(self.error_count_key, self.error_limit) - self.config["allow_passthrough"] = True - - # Passthrough lets this request through - result = with_circuit_breaker(self.key, self.callback, self.config) - - assert self.callback.call_count == 1 - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.bypassed") - assert result == "Dogs are great!" 
- - # Error count is reset - assert cache.get_or_set(self.error_count_key, default=0) == 0 - - @patch("sentry.utils.circuit_breaker.metrics.incr") - def resets_after_error_window(self, mock_metrics_incr: MagicMock): - cache.set(self.error_count_key, self.error_limit) - - with pytest.raises(CircuitBreakerTripped): - with_circuit_breaker(self.key, self.callback, self.config) - - assert self.callback.call_count == 0 - mock_metrics_incr.assert_called_with(f"circuit_breaker.{self.key}.throttled") - - time.sleep(self.error_limit_window) - - assert cache.get_or_set(self.error_count_key, default=0) == 0 - - # Now requests go through - result = with_circuit_breaker(self.key, self.callback, self.config) - - assert self.callback.call_count == 1 - assert result == "Dogs are great!" From bfe125362b72d81dcde041bd4a51a002ce323204 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 31 Jul 2024 15:08:11 -0700 Subject: [PATCH 34/52] chore(seer grouping): Remove `SeerSimilarIssuesMetadata` type (#75376) Now that `request_hash` is no longer part of the the Seer metadata we store on events[1], all the metadata is is the Seer results and the Seer model version - not really worth having a whole dataclass for, especially since it's only used in one spot in the code. This PR therefore removes it in favor a simple dictionary. 
[1] https://github.com/getsentry/sentry/pull/75209 --- src/sentry/grouping/ingest/seer.py | 18 +++++++++--------- src/sentry/seer/similarity/types.py | 7 ------- 2 files changed, 9 insertions(+), 16 deletions(-) diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py index 6d951d6eeae0b..153398fff421a 100644 --- a/src/sentry/grouping/ingest/seer.py +++ b/src/sentry/grouping/ingest/seer.py @@ -1,17 +1,19 @@ import logging from dataclasses import asdict +from typing import Any from django.conf import settings from sentry import features, options from sentry import ratelimits as ratelimiter +from sentry.conf.server import SEER_SIMILARITY_MODEL_VERSION from sentry.eventstore.models import Event from sentry.grouping.grouping_info import get_grouping_info_from_variants from sentry.grouping.result import CalculatedHashes from sentry.models.group import Group from sentry.models.project import Project from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer -from sentry.seer.similarity.types import SeerSimilarIssuesMetadata, SimilarIssuesEmbeddingsRequest +from sentry.seer.similarity.types import SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( event_content_is_seer_eligible, filter_null_from_string, @@ -187,12 +189,7 @@ def get_seer_similar_issues( event: Event, primary_hashes: CalculatedHashes, num_neighbors: int = 1, -) -> tuple[ - dict[ - str, str | list[dict[str, float | bool | int | str]] - ], # a SeerSimilarIssuesMetadata instance, dictified - Group | None, -]: +) -> tuple[dict[str, Any], Group | None]: """ Ask Seer for the given event's nearest neighbor(s) and return the seer response data, sorted with the best matches first, along with the group Seer decided the event should go in, if any, @@ -218,7 +215,10 @@ def get_seer_similar_issues( # Similar issues are returned with the closest match first seer_results = get_similarity_data_from_seer(request_data) - similar_issues_metadata = 
asdict(SeerSimilarIssuesMetadata(results=seer_results)) + similar_issues_metadata = { + "results": [asdict(result) for result in seer_results], + "similarity_model_version": SEER_SIMILARITY_MODEL_VERSION, + } parent_group = ( Group.objects.filter(id=seer_results[0].parent_group_id).first() if seer_results else None ) @@ -229,7 +229,7 @@ def get_seer_similar_issues( "event_id": event.event_id, "project_id": event.project.id, "hash": event_hash, - "results": similar_issues_metadata["results"], + "results": seer_results, "group_returned": bool(parent_group), }, ) diff --git a/src/sentry/seer/similarity/types.py b/src/sentry/seer/similarity/types.py index 3129f8cdab620..fc11ed32d6ad2 100644 --- a/src/sentry/seer/similarity/types.py +++ b/src/sentry/seer/similarity/types.py @@ -3,7 +3,6 @@ from dataclasses import dataclass from typing import Any, ClassVar, NotRequired, Self, TypedDict -from sentry.conf.server import SEER_SIMILARITY_MODEL_VERSION from sentry.models.grouphash import GroupHash from sentry.utils.json import apply_key_filter @@ -109,9 +108,3 @@ def from_raw(cls, project_id: int, raw_similar_issue_data: Mapping[str, Any]) -> } return cls(**similar_issue_data) - - -@dataclass -class SeerSimilarIssuesMetadata: - results: list[SeerSimilarIssueData] - similarity_model_version: str = SEER_SIMILARITY_MODEL_VERSION From 8499a5bc0aca51e626bfe6277013d3923b2f014e Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:16:24 -0700 Subject: [PATCH 35/52] delete(remote-config): rm from settings (#75391) missed a spot in the settings sidebar --- .../views/settings/project/navigationConfiguration.tsx | 8 -------- 1 file changed, 8 deletions(-) diff --git a/static/app/views/settings/project/navigationConfiguration.tsx b/static/app/views/settings/project/navigationConfiguration.tsx index 72760b2d486cb..c3dbd1e025c75 100644 --- a/static/app/views/settings/project/navigationConfiguration.tsx +++ 
b/static/app/views/settings/project/navigationConfiguration.tsx @@ -1,4 +1,3 @@ -import FeatureBadge from 'sentry/components/badge/featureBadge'; import {t} from 'sentry/locale'; import ConfigStore from 'sentry/stores/configStore'; import type {Organization} from 'sentry/types/organization'; @@ -136,13 +135,6 @@ export default function getConfiguration({ title: t('Loader Script'), description: t("View and manage the project's Loader Script"), }, - { - path: `${pathPrefix}/remote-config/`, - badge: () => , - title: t('Remote Config'), - description: t("View and manage the project's Remote Configuration"), - show: organization?.features.includes('remote-config'), - }, { path: `${pathPrefix}/release-tracking/`, title: t('Releases'), From 2efcb160aaf7e09246c83ee08f966723957e259b Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 31 Jul 2024 18:16:34 -0400 Subject: [PATCH 36/52] ref: delete unused view_configure (#75388) split out from my "strongly type auth configure views" branch last referenced in 1caf42824c80abba43d16853d5e60c257ee01c7c --- src/sentry/plugins/base/v1.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/sentry/plugins/base/v1.py b/src/sentry/plugins/base/v1.py index df4a4cac43bba..c462dcc4c3cc4 100644 --- a/src/sentry/plugins/base/v1.py +++ b/src/sentry/plugins/base/v1.py @@ -505,12 +505,6 @@ def configure(self, request, project=None): def get_url_module(self): """Allows a plugin to return the import path to a URL module.""" - def view_configure(self, request, project, **kwargs): - if request.method == "GET": - return Response(self.get_configure_plugin_fields(project=project, **kwargs)) - self.configure(project, request.data) - return Response({"message": "Successfully updated configuration."}) - class Plugin(IPlugin, metaclass=PluginMount): """ From 6da54ef2efff835582112c7ac39b760fde19ac4f Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Wed, 31 Jul 
2024 18:22:35 -0400 Subject: [PATCH 37/52] chore(ui): Adjust definition of transaction duration field (#75393) This field definition is what shows up in Discover, and other search bars that allow for querying transactions. It says specifically the duration in milliseconds, but Discover allows you to query by `ms`, `s`, `m`, `h`, so this PR changes this definition to be more general. --- static/app/utils/fields/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/utils/fields/index.ts b/static/app/utils/fields/index.ts index b03d606d46e6f..d75d50060d7a1 100644 --- a/static/app/utils/fields/index.ts +++ b/static/app/utils/fields/index.ts @@ -1123,7 +1123,7 @@ const EVENT_FIELD_DEFINITIONS: Record = { valueType: FieldValueType.STRING, }, [FieldKey.TRANSACTION_DURATION]: { - desc: t('Duration, in milliseconds, of the transaction'), + desc: t('Duration of the transaction'), kind: FieldKind.FIELD, valueType: FieldValueType.DURATION, }, From 0b26814da79c9d603865a1cb88208e34ab61e9de Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:28:36 -0700 Subject: [PATCH 38/52] analytics(replay): add param for mobile replay (#75389) closes https://github.com/getsentry/sentry/issues/75330 --- .../app/components/replays/replayContext.tsx | 3 ++- .../utils/analytics/replayAnalyticsEvents.tsx | 2 ++ .../analytics/workflowAnalyticsEvents.tsx | 1 + static/app/utils/events.tsx | 1 + .../views/replays/detail/layout/focusTabs.tsx | 1 + static/app/views/replays/details.tsx | 22 +++++++++---------- 6 files changed, 18 insertions(+), 12 deletions(-) diff --git a/static/app/components/replays/replayContext.tsx b/static/app/components/replays/replayContext.tsx index 9d2f547a02076..9344c00e305ec 100644 --- a/static/app/components/replays/replayContext.tsx +++ b/static/app/components/replays/replayContext.tsx @@ -544,9 +544,10 @@ export function Provider({ user_email: user.email, play, context: 
analyticsContext, + mobile: isVideoReplay, }); }, - [organization, user.email, analyticsContext, getCurrentPlayerTime] + [organization, user.email, analyticsContext, getCurrentPlayerTime, isVideoReplay] ); useEffect(() => { diff --git a/static/app/utils/analytics/replayAnalyticsEvents.tsx b/static/app/utils/analytics/replayAnalyticsEvents.tsx index 4d393a4f087e4..3db84d3e7ef3f 100644 --- a/static/app/utils/analytics/replayAnalyticsEvents.tsx +++ b/static/app/utils/analytics/replayAnalyticsEvents.tsx @@ -51,6 +51,7 @@ export type ReplayEventParameters = { title: string; }; 'replay.details-tab-changed': { + mobile: boolean; tab: string; }; 'replay.details-time-spent': { @@ -88,6 +89,7 @@ export type ReplayEventParameters = { 'replay.list-view-setup-sidebar': {}; 'replay.play-pause': { context: string; + mobile: boolean; play: boolean; user_email: string; }; diff --git a/static/app/utils/analytics/workflowAnalyticsEvents.tsx b/static/app/utils/analytics/workflowAnalyticsEvents.tsx index 5e001aaa45dea..135675be4bc06 100644 --- a/static/app/utils/analytics/workflowAnalyticsEvents.tsx +++ b/static/app/utils/analytics/workflowAnalyticsEvents.tsx @@ -47,6 +47,7 @@ export type BaseEventAnalyticsParams = { frames_without_source_maps_percent?: number; has_graphql_request?: boolean; has_otel?: boolean; + mobile?: boolean; release_user_agent?: string; sdk_name?: string; sdk_version?: string; diff --git a/static/app/utils/events.tsx b/static/app/utils/events.tsx index 42ac576fac3e8..4fa385049623d 100644 --- a/static/app/utils/events.tsx +++ b/static/app/utils/events.tsx @@ -464,6 +464,7 @@ export function getAnalyticsDataForEvent(event?: Event | null): BaseEventAnalyti sdk_version: event?.sdk?.version, release_user_agent: event?.release?.userAgent, resolved_with: event?.resolvedWith ?? 
[], + mobile: isMobilePlatform(event?.platform), error_has_replay: Boolean(getReplayIdFromEvent(event)), error_has_user_feedback: defined(event?.userReport), has_otel: event?.contexts?.otel !== undefined, diff --git a/static/app/views/replays/detail/layout/focusTabs.tsx b/static/app/views/replays/detail/layout/focusTabs.tsx index b016f31fca380..4def49b97df3b 100644 --- a/static/app/views/replays/detail/layout/focusTabs.tsx +++ b/static/app/views/replays/detail/layout/focusTabs.tsx @@ -80,6 +80,7 @@ function FocusTabs({className, isVideoReplay}: Props) { trackAnalytics('replay.details-tab-changed', { tab, organization, + mobile: isVideoReplay, }); }} > diff --git a/static/app/views/replays/details.tsx b/static/app/views/replays/details.tsx index 6a95d80e72392..add668cf22e32 100644 --- a/static/app/views/replays/details.tsx +++ b/static/app/views/replays/details.tsx @@ -41,15 +41,6 @@ function ReplayDetails({params: {replaySlug}}: Props) { const location = useLocation(); const organization = useOrganization(); - useReplayPageview('replay.details-time-spent'); - useRouteAnalyticsEventNames('replay_details.viewed', 'Replay Details: Viewed'); - useRouteAnalyticsParams({ - organization, - referrer: decodeScalar(location.query.referrer), - user_email: user.email, - tab: location.query.t_main, - }); - const {slug: orgSlug} = organization; // TODO: replayId is known ahead of time and useReplayData is parsing it from the replaySlug @@ -69,6 +60,17 @@ function ReplayDetails({params: {replaySlug}}: Props) { }); const replayErrors = errors.filter(e => e.title !== 'User Feedback'); + const isVideoReplay = replay?.isVideoReplay(); + + useReplayPageview('replay.details-time-spent'); + useRouteAnalyticsEventNames('replay_details.viewed', 'Replay Details: Viewed'); + useRouteAnalyticsParams({ + organization, + referrer: decodeScalar(location.query.referrer), + user_email: user.email, + tab: location.query.t_main, + mobile: isVideoReplay, + }); useLogReplayDataLoaded({fetchError, 
fetching, projectSlug, replay}); @@ -177,8 +179,6 @@ function ReplayDetails({params: {replaySlug}}: Props) { ); } - const isVideoReplay = replay?.isVideoReplay(); - return ( Date: Wed, 31 Jul 2024 15:39:08 -0700 Subject: [PATCH 39/52] fix(toolbar): add back in ff click analytic (#75397) i was wrong, i did in fact delete them. adding them back ![image](https://github.com/user-attachments/assets/7784dea2-cb5c-4723-9d88-3c0a871dc70f) --- .../components/featureFlags/featureFlagItem.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx b/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx index fbe1c5b3f0dba..f1617c6e42d3b 100644 --- a/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx +++ b/static/app/components/devtoolbar/components/featureFlags/featureFlagItem.tsx @@ -18,7 +18,8 @@ import {useFeatureFlagsContext} from './featureFlagsContext'; type FeatureFlag = {name: string; override: FlagValue; value: FlagValue}; export default function FeatureFlagItem({flag}: {flag: FeatureFlag}) { - const {featureFlags} = useConfiguration(); + const {featureFlags, trackAnalytics} = useConfiguration(); + const {eventName, eventKey} = useContext(AnalyticsContext); return ( @@ -27,6 +28,12 @@ export default function FeatureFlagItem({flag}: {flag: FeatureFlag}) { { + trackAnalytics?.({ + eventKey: eventKey + '.click', + eventName: eventName + ' clicked', + }); + }} > {flag.name} From 6b4a60abe9317b367c7836d57a144021789e2031 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Wed, 31 Jul 2024 15:52:15 -0700 Subject: [PATCH 40/52] fix(hybrid-cloud): Pushes a temporary fix for pydantic unpickling issues with the v2 upgrade (#75386) --- src/sentry/hybridcloud/rpc/__init__.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/sentry/hybridcloud/rpc/__init__.py b/src/sentry/hybridcloud/rpc/__init__.py index 
cc6d2b2fbcd23..91061ecbb18a3 100644 --- a/src/sentry/hybridcloud/rpc/__init__.py +++ b/src/sentry/hybridcloud/rpc/__init__.py @@ -49,6 +49,17 @@ class RpcModel(pydantic.BaseModel): from_attributes=True, use_enum_values=True, coerce_numbers_to_str=True ) + def __setstate__(self, state: dict[Any, Any]) -> None: + """ + __setstate__ override to alleviate an unpickling issue in production with the pydantic version upgrade. + """ + state.setdefault("__pydantic_extra__", {}) + state.setdefault("__pydantic_private__", {}) + + if "__pydantic_fields_set__" not in state: + state["__pydantic_fields_set__"] = state.get("__fields_set__") + super().__setstate__(state) + @classmethod def get_field_names(cls) -> Iterable[str]: return iter(cls.model_fields.keys()) From f2ed2e371b9bd14672adee5625e04ec1508c043a Mon Sep 17 00:00:00 2001 From: Andrew Liu <159852527+aliu39@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:57:28 -0700 Subject: [PATCH 41/52] chore(feedback): Add deprecation notes to user report endpoint apidocs (#75396) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ref https://github.com/getsentry/sentry-docs/issues/10643 The replay team decided on a call to revert https://github.com/getsentry/sentry/pull/75079 because some users may still need this doc as a reference. 
What it looks like: Screenshot 2024-07-31 at 3 18 44 PM Screenshot 2024-07-31 at 3 27 25 PM --- api-docs/paths/projects/user-feedback.json | 4 ++-- src/sentry/api/endpoints/project_user_reports.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/api-docs/paths/projects/user-feedback.json b/api-docs/paths/projects/user-feedback.json index 2913a4590520e..8c4f85eac9254 100644 --- a/api-docs/paths/projects/user-feedback.json +++ b/api-docs/paths/projects/user-feedback.json @@ -1,7 +1,7 @@ { "get": { "tags": ["Projects"], - "description": "Return a list of user feedback items within this project.", + "description": "Return a list of user feedback items within this project.\n\n*This list does not include submissions from the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget). This is because it is based on an older format called User Reports - read more [here](https://develop.sentry.dev/application/feedback-architecture/#user-reports).*", "operationId": "List a Project's User Feedback", "parameters": [ { @@ -68,7 +68,7 @@ }, "post": { "tags": ["Projects"], - "description": "Submit and associate user feedback with an issue.\n\nFeedback must be received by the server no more than 30 minutes after the event was saved.\n\nAdditionally, within 5 minutes of submitting feedback it may also be overwritten. This is useful in situations where you may need to retry sending a request due to network failures.\n\nIf feedback is rejected due to a mutability threshold, a 409 status code will be returned.\n\nNote: Feedback may be submitted with DSN authentication (see auth documentation).", + "description": "*This endpoint is DEPRECATED. We document it here for older SDKs and users who are still migrating to the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget) or [API](https://docs.sentry.io/platforms/javascript/user-feedback/#user-feedback-api)(multi-platform). 
If you are a new user, do not use this endpoint - unless you don't have a JS frontend, and your platform's SDK does not offer a feedback API.*\n\nFeedback must be received by the server no more than 30 minutes after the event was saved.\n\nAdditionally, within 5 minutes of submitting feedback it may also be overwritten. This is useful in situations where you may need to retry sending a request due to network failures.\n\nIf feedback is rejected due to a mutability threshold, a 409 status code will be returned.\n\nNote: Feedback may be submitted with DSN authentication (see auth documentation).", "operationId": "Submit User Feedback", "parameters": [ { diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py index 36dda69e763ff..3660faf94254d 100644 --- a/src/sentry/api/endpoints/project_user_reports.py +++ b/src/sentry/api/endpoints/project_user_reports.py @@ -45,6 +45,8 @@ def get(self, request: Request, project) -> Response: Return a list of user feedback items within this project. + *This list does not include submissions from the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget). This is because it is based on an older format called User Reports - read more [here](https://develop.sentry.dev/application/feedback-architecture/#user-reports).* + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required @@ -89,6 +91,8 @@ def post(self, request: Request, project) -> Response: Submit User Feedback ```````````````````` + *This endpoint is DEPRECATED. We document it here for older SDKs and users who are still migrating to the [User Feedback Widget](https://docs.sentry.io/product/user-feedback/#user-feedback-widget) or [API](https://docs.sentry.io/platforms/javascript/user-feedback/#user-feedback-api)(multi-platform). 
If you are a new user, do not use this endpoint - unless you don't have a JS frontend, and your platform's SDK does not offer a feedback API.* + Submit and associate user feedback with an issue. Feedback must be received by the server no more than 30 minutes after the event was saved. From bdf52ad38766d3494f7d78532a5b3293a89dc06a Mon Sep 17 00:00:00 2001 From: Rohan Agarwal <47861399+roaga@users.noreply.github.com> Date: Wed, 31 Jul 2024 19:16:42 -0400 Subject: [PATCH 42/52] feat(autofix): Switch root cause to show relevant code context for issue instead of fixes (#75294) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Backend PR: https://github.com/getsentry/seer/pull/979 This reduces redundancy between the root cause and planning steps and helps the user understand and choose a root cause more confidently. This PR contains the frontend changes to match the backend changes in Seer. Screenshot 2024-07-31 at 12 43 50 PM --- static/app/components/codeSnippet.stories.tsx | 16 ++ static/app/components/codeSnippet.tsx | 17 +- .../events/autofix/autofixRootCause.spec.tsx | 20 +- .../events/autofix/autofixRootCause.tsx | 218 +++++++++--------- .../events/autofix/autofixSteps.tsx | 4 +- .../components/events/autofix/index.spec.tsx | 3 +- static/app/components/events/autofix/types.ts | 10 +- .../fixtures/autofixRootCauseCodeContext.ts | 16 ++ tests/js/fixtures/autofixRootCauseData.ts | 6 +- .../fixtures/autofixRootCauseSuggestedFix.ts | 17 -- .../endpoints/test_group_autofix_update.py | 3 - 11 files changed, 179 insertions(+), 151 deletions(-) create mode 100644 tests/js/fixtures/autofixRootCauseCodeContext.ts delete mode 100644 tests/js/fixtures/autofixRootCauseSuggestedFix.ts diff --git a/static/app/components/codeSnippet.stories.tsx b/static/app/components/codeSnippet.stories.tsx index dce391a761ad9..9876bb931ebf9 100644 --- a/static/app/components/codeSnippet.stories.tsx +++ b/static/app/components/codeSnippet.stories.tsx @@ 
-133,6 +133,22 @@ Sentry.addIntegration(replayIntegration());`} integrations: [], }); +// Sometime later +const { replayIntegration } = await import("@sentry/browser"); +Sentry.addIntegration(replayIntegration());`} +
+

+ +

+ {`Sentry.init({ + // Note, Replay is NOT instantiated below: + integrations: [], +}); + // Sometime later const { replayIntegration } = await import("@sentry/browser"); Sentry.addIntegration(replayIntegration());`} diff --git a/static/app/components/codeSnippet.tsx b/static/app/components/codeSnippet.tsx index 47e0624df3a41..e6e94e4b1c240 100644 --- a/static/app/components/codeSnippet.tsx +++ b/static/app/components/codeSnippet.tsx @@ -36,6 +36,10 @@ interface CodeSnippetProps { */ isRounded?: boolean; language?: string; + /** + * Line numbers of the code that will be visually highlighted. + */ + linesToHighlight?: number[]; /** * Fires after the code snippet is highlighted and all DOM nodes are available * @param element The root element of the code snippet @@ -69,6 +73,7 @@ export function CodeSnippet({ filename, hideCopyButton, language, + linesToHighlight, icon, isRounded = true, onAfterHighlight, @@ -80,6 +85,13 @@ export function CodeSnippet({ }: CodeSnippetProps) { const ref = useRef(null); + // https://prismjs.com/plugins/line-highlight/ + useEffect(() => { + if (linesToHighlight) { + import('prismjs/plugins/line-highlight/prism-line-highlight'); + } + }, [linesToHighlight]); + useEffect(() => { const element = ref.current; if (!element) { @@ -169,7 +181,10 @@ export function CodeSnippet({ )} -
+      
         );
 
-    // Displays all root cause and suggested fix info
+    // Displays all root cause and code context info
     expect(screen.getByText('This is the title of a root cause.')).toBeInTheDocument();
     expect(
       screen.getByText('This is the description of a root cause.')
     ).toBeInTheDocument();
     expect(
-      screen.getByText('Suggested Fix #1: This is the title of a suggested fix.')
+      screen.getByText('Relevant Code #1: This is the title of a relevant code snippet.')
     ).toBeInTheDocument();
     expect(
-      screen.getByText('This is the description of a suggested fix.')
+      screen.getByText('This is the description of a relevant code snippet.')
     ).toBeInTheDocument();
 
-    await userEvent.click(screen.getByRole('button', {name: 'Continue With This Fix'}));
+    await userEvent.click(screen.getByRole('button', {name: 'Continue with a fix'}));
 
     expect(mockSelectFix).toHaveBeenCalledWith(
       expect.anything(),
@@ -42,7 +42,6 @@ describe('AutofixRootCause', function () {
           payload: {
             type: 'select_root_cause',
             cause_id: '100',
-            fix_id: '200',
           },
         },
       })
@@ -61,7 +60,12 @@ describe('AutofixRootCause', function () {
       screen.getByRole('button', {name: 'Provide your own root cause'})
     );
     await userEvent.keyboard('custom root cause');
-    await userEvent.click(screen.getByRole('button', {name: 'Continue With This Fix'}));
+    await userEvent.click(
+      screen.getByRole('button', {
+        name: 'Continue with a fix',
+        description: 'Continue with a fix',
+      })
+    );
 
     expect(mockSelectFix).toHaveBeenCalledWith(
       expect.anything(),
@@ -87,7 +91,7 @@ describe('AutofixRootCause', function () {
       />
     );
 
-    // Displays all root cause and suggested fix info
+    // Displays all root cause and code context info
     expect(
       screen.getByText('Autofix was not able to find a root cause. Maybe try again?')
     ).toBeInTheDocument();
diff --git a/static/app/components/events/autofix/autofixRootCause.tsx b/static/app/components/events/autofix/autofixRootCause.tsx
index 489ac0311f2ad..adb4628755d9d 100644
--- a/static/app/components/events/autofix/autofixRootCause.tsx
+++ b/static/app/components/events/autofix/autofixRootCause.tsx
@@ -8,10 +8,10 @@ import {Button} from 'sentry/components/button';
 import {CodeSnippet} from 'sentry/components/codeSnippet';
 import {AutofixShowMore} from 'sentry/components/events/autofix/autofixShowMore';
 import {
+  type AutofixRootCauseCodeContext,
+  type AutofixRootCauseCodeContextSnippet,
   type AutofixRootCauseData,
   type AutofixRootCauseSelection,
-  type AutofixRootCauseSuggestedFix,
-  type AutofixRootCauseSuggestedFixSnippet,
   AutofixStepType,
 } from 'sentry/components/events/autofix/types';
 import {
@@ -53,7 +53,6 @@ function useSelectCause({groupId, runId}: {groupId: string; runId: string}) {
       params:
         | {
             causeId: string;
-            fixId: string;
           }
         | {
             customRootCause: string;
@@ -75,7 +74,6 @@ function useSelectCause({groupId, runId}: {groupId: string; runId: string}) {
                 payload: {
                   type: 'select_root_cause',
                   cause_id: params.causeId,
-                  fix_id: params.fixId,
                 },
               },
       });
@@ -108,7 +106,6 @@ function useSelectCause({groupId, runId}: {groupId: string; runId: string}) {
                         }
                       : {
                           cause_id: params.causeId,
-                          fix_id: params.fixId,
                         },
                 };
               }),
@@ -123,6 +120,33 @@ function useSelectCause({groupId, runId}: {groupId: string; runId: string}) {
   });
 }
 
+function getLinesToHighlight(suggestedFix: AutofixRootCauseCodeContext): number[] {
+  function findLinesWithSubstrings(
+    input: string | undefined,
+    substring: string
+  ): number[] {
+    if (!input) {
+      return [];
+    }
+    const lines = input.split('\n');
+    const result: number[] = [];
+
+    lines.forEach((line, index) => {
+      if (line.includes(substring)) {
+        result.push(index + 1); // line numbers are 1-based
+      }
+    });
+
+    return result;
+  }
+
+  const lineNumbersToHighlight = findLinesWithSubstrings(
+    suggestedFix.snippet?.snippet,
+    '***'
+  );
+  return lineNumbersToHighlight;
+}
+
 function RootCauseContent({
   selected,
   children,
@@ -143,65 +167,30 @@ function RootCauseContent({
   );
 }
 
-function SuggestedFixSnippet({snippet}: {snippet: AutofixRootCauseSuggestedFixSnippet}) {
+function SuggestedFixSnippet({
+  snippet,
+  linesToHighlight,
+}: {
+  linesToHighlight: number[];
+  snippet: AutofixRootCauseCodeContextSnippet;
+}) {
   const extension = getFileExtension(snippet.file_path);
   const lanugage = extension ? getPrismLanguage(extension) : undefined;
 
   return (
     
- + {snippet.snippet}
); } -function CauseSuggestedFix({ - fixNumber, - suggestedFix, - groupId, - runId, - causeId, -}: { - causeId: string; - fixNumber: number; - groupId: string; - runId: string; - suggestedFix: AutofixRootCauseSuggestedFix; -}) { - const {isLoading, mutate: handleSelectFix} = useSelectCause({groupId, runId}); - - return ( - - - - - -

- {suggestedFix.snippet && } - - ); -} - function CauseOption({ cause, selected, @@ -215,6 +204,8 @@ function CauseOption({ selected: boolean; setSelectedId: (id: string) => void; }) { + const {isLoading, mutate: handleSelectFix} = useSelectCause({groupId, runId}); + return ( setSelectedId(cause.id)}> {!selected && } @@ -224,13 +215,26 @@ function CauseOption({ __html: singleLineRenderer(cause.title), }} /> - + @@ -367,11 +350,8 @@ function AutofixRootCauseDisplay({ } const selectedCause = causes.find(cause => cause.id === rootCauseSelection.cause_id); - const selectedFix = selectedCause?.suggested_fixes?.find( - fix => fix.id === rootCauseSelection.fix_id - ); - if (!selectedCause || !selectedFix) { + if (!selectedCause) { return {t('Selected root cause not found.')}; } @@ -380,7 +360,7 @@ function AutofixRootCauseDisplay({ return ( {otherCauses.length > 0 && ( @@ -397,23 +377,7 @@ function AutofixRootCauseDisplay({ __html: marked(cause.description), }} /> - {cause.suggested_fixes?.map(fix => ( - - - - -

- {fix.snippet && } - - ))} + ))} @@ -469,6 +433,35 @@ export function AutofixRootCause(props: AutofixRootCauseProps) { return ; } +export function AutofixRootCauseCodeContexts({ + codeContext, +}: { + codeContext: AutofixRootCauseCodeContext[]; +}) { + return codeContext?.map((fix, index) => ( + + + + +

+ {fix.snippet && ( + + )} + + )); +} + const NoCausesPadding = styled('div')` padding: 0 ${space(2)}; `; @@ -567,3 +560,8 @@ const OptionFooter = styled('div')` const CustomRootCausePadding = styled('div')` padding: 0 ${space(2)} ${space(2)} ${space(2)}; `; + +const RootCauseOptionsRow = styled('div')` + display: flex; + flex-direction: row; +`; diff --git a/static/app/components/events/autofix/autofixSteps.tsx b/static/app/components/events/autofix/autofixSteps.tsx index 671b742470a21..8c37ed7d8dff6 100644 --- a/static/app/components/events/autofix/autofixSteps.tsx +++ b/static/app/components/events/autofix/autofixSteps.tsx @@ -215,7 +215,9 @@ export function ExpandableStep({ {activeLog && !isExpanded && ( )} diff --git a/static/app/components/events/autofix/index.spec.tsx b/static/app/components/events/autofix/index.spec.tsx index 82c1a6aefb93c..cbc95ed82ec46 100644 --- a/static/app/components/events/autofix/index.spec.tsx +++ b/static/app/components/events/autofix/index.spec.tsx @@ -169,12 +169,11 @@ describe('Autofix', () => { likelihood: 1, title: 'Test Cause Title', description: 'Test Cause Description', - suggested_fixes: [ + code_context: [ { id: 'fix-1', title: 'Test Fix Title', description: 'Test Fix Description', - elegance: 1, snippet: { file_path: 'test/file/path.py', snippet: 'two = 1 + 1', diff --git a/static/app/components/events/autofix/types.ts b/static/app/components/events/autofix/types.ts index f6d256d7c3ef7..dd6ffa7c30be5 100644 --- a/static/app/components/events/autofix/types.ts +++ b/static/app/components/events/autofix/types.ts @@ -89,7 +89,6 @@ export interface AutofixDefaultStep extends BaseStep { export type AutofixRootCauseSelection = | { cause_id: string; - fix_id: string; } | {custom_root_cause: string} | null; @@ -122,25 +121,24 @@ export interface AutofixUserResponseStep extends BaseStep { user_id: number; } -export type AutofixRootCauseSuggestedFixSnippet = { +export type AutofixRootCauseCodeContextSnippet = { file_path: string; 
snippet: string; }; -export type AutofixRootCauseSuggestedFix = { +export type AutofixRootCauseCodeContext = { description: string; - elegance: number; id: string; title: string; - snippet?: AutofixRootCauseSuggestedFixSnippet; + snippet?: AutofixRootCauseCodeContextSnippet; }; export type AutofixRootCauseData = { actionability: number; + code_context: AutofixRootCauseCodeContext[]; description: string; id: string; likelihood: number; - suggested_fixes: AutofixRootCauseSuggestedFix[]; title: string; }; diff --git a/tests/js/fixtures/autofixRootCauseCodeContext.ts b/tests/js/fixtures/autofixRootCauseCodeContext.ts new file mode 100644 index 0000000000000..59645d0fe7a00 --- /dev/null +++ b/tests/js/fixtures/autofixRootCauseCodeContext.ts @@ -0,0 +1,16 @@ +import type { AutofixRootCauseCodeContext } from 'sentry/components/events/autofix/types'; + +export function AutofixRootCauseCodeContext( + params: Partial = {} +): AutofixRootCauseCodeContext { + return { + id: '200', + title: 'This is the title of a relevant code snippet.', + description: 'This is the description of a relevant code snippet.', + snippet: { + file_path: 'src/file.py', + snippet: 'x = 1 + 1;', + }, + ...params, + }; +} diff --git a/tests/js/fixtures/autofixRootCauseData.ts b/tests/js/fixtures/autofixRootCauseData.ts index 3f06b5827abfc..ee33d78f19561 100644 --- a/tests/js/fixtures/autofixRootCauseData.ts +++ b/tests/js/fixtures/autofixRootCauseData.ts @@ -1,6 +1,6 @@ -import {AutofixRootCauseSuggestedFix} from 'sentry-fixture/autofixRootCauseSuggestedFix'; +import { AutofixRootCauseCodeContext } from 'sentry-fixture/autofixRootCauseCodeContext'; -import type {AutofixRootCauseData} from 'sentry/components/events/autofix/types'; +import type { AutofixRootCauseData } from 'sentry/components/events/autofix/types'; export function AutofixRootCauseData( params: Partial = {} @@ -11,7 +11,7 @@ export function AutofixRootCauseData( description: 'This is the description of a root cause.', actionability: 0.8, 
likelihood: 0.9, - suggested_fixes: [AutofixRootCauseSuggestedFix()], + code_context: [AutofixRootCauseCodeContext()], ...params, }; } diff --git a/tests/js/fixtures/autofixRootCauseSuggestedFix.ts b/tests/js/fixtures/autofixRootCauseSuggestedFix.ts deleted file mode 100644 index 57b912451832a..0000000000000 --- a/tests/js/fixtures/autofixRootCauseSuggestedFix.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type {AutofixRootCauseSuggestedFix} from 'sentry/components/events/autofix/types'; - -export function AutofixRootCauseSuggestedFix( - params: Partial = {} -): AutofixRootCauseSuggestedFix { - return { - id: '200', - title: 'This is the title of a suggested fix.', - description: 'This is the description of a suggested fix.', - elegance: 0.8, - snippet: { - file_path: 'src/file.py', - snippet: 'x = 1 + 1;', - }, - ...params, - }; -} diff --git a/tests/sentry/api/endpoints/test_group_autofix_update.py b/tests/sentry/api/endpoints/test_group_autofix_update.py index de02f34a24689..4fe79a8dc7c32 100644 --- a/tests/sentry/api/endpoints/test_group_autofix_update.py +++ b/tests/sentry/api/endpoints/test_group_autofix_update.py @@ -26,7 +26,6 @@ def test_autofix_update_successful(self, mock_post): "payload": { "type": "select_root_cause", "cause_id": 456, - "fix_id": 789, }, }, format="json", @@ -41,7 +40,6 @@ def test_autofix_update_successful(self, mock_post): "payload": { "type": "select_root_cause", "cause_id": 456, - "fix_id": 789, }, "invoking_user": { "id": self.user.id, @@ -64,7 +62,6 @@ def test_autofix_update_failure(self, mock_post): "payload": { "type": "select_root_cause", "cause_id": 456, - "fix_id": 789, }, "invoking_user": { "id": self.user.id, From 215491de2bcd42dbbe7c18b5bae1bcec6257b0b6 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Wed, 31 Jul 2024 16:56:11 -0700 Subject: [PATCH 43/52] Process delayed alert conditions in batches of 10,000 (#75302) # Description Some orgs are sending 100k+ events per minute, 
and the processing is taking too long for a single task. This PR will look at the size of the hash and determine if it needs to be batched. There are some restrictions around the celery task / redis, info is outlined in a code comment here: https://github.com/getsentry/sentry/pull/75302/files#diff-f906e75a0e4419db4870fa45ca5a1608ca79beaa052c8bc50b4805607a665d27R482-R486 --- src/sentry/buffer/base.py | 14 ++ src/sentry/buffer/redis.py | 18 +++ src/sentry/options/defaults.py | 5 + .../rules/processing/delayed_processing.py | 101 ++++++++---- tests/sentry/buffer/test_base.py | 7 + tests/sentry/buffer/test_redis.py | 31 ++++ .../processing/test_delayed_processing.py | 149 ++++++++++++++++-- 7 files changed, 280 insertions(+), 45 deletions(-) diff --git a/src/sentry/buffer/base.py b/src/sentry/buffer/base.py index 6182667947c5f..f2ef35ad7440e 100644 --- a/src/sentry/buffer/base.py +++ b/src/sentry/buffer/base.py @@ -34,6 +34,7 @@ class Buffer(Service): "push_to_hash", "get_sorted_set", "get_hash", + "get_hash_length", "delete_hash", "delete_key", ) @@ -54,6 +55,11 @@ def get_hash( ) -> dict[str, str]: return {} + def get_hash_length( + self, model: type[models.Model], field: dict[str, models.Model | str | int] + ) -> int: + raise NotImplementedError + def get_sorted_set(self, key: str, min: float, max: float) -> list[tuple[int, datetime]]: return [] @@ -69,6 +75,14 @@ def push_to_hash( ) -> None: return None + def push_to_hash_bulk( + self, + model: type[models.Model], + filters: dict[str, models.Model | str | int], + data: dict[str, str], + ) -> None: + raise NotImplementedError + def delete_hash( self, model: type[models.Model], diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py index 27c00b6d77887..bce5263f42789 100644 --- a/src/sentry/buffer/redis.py +++ b/src/sentry/buffer/redis.py @@ -81,13 +81,16 @@ def callback(self, buffer_hook_event: BufferHookEvent) -> bool: redis_buffer_registry = BufferHookRegistry() +# Note HMSET is not supported after redis
4.0.0, after updating we can use HSET directly. class RedisOperation(Enum): SORTED_SET_ADD = "zadd" SORTED_SET_GET_RANGE = "zrangebyscore" SORTED_SET_DELETE_RANGE = "zremrangebyscore" HASH_ADD = "hset" + HASH_ADD_BULK = "hmset" HASH_GET_ALL = "hgetall" HASH_DELETE = "hdel" + HASH_LENGTH = "hlen" class PendingBuffer: @@ -296,6 +299,15 @@ def push_to_hash( key = self._make_key(model, filters) self._execute_redis_operation(key, RedisOperation.HASH_ADD, field, value) + def push_to_hash_bulk( + self, + model: type[models.Model], + filters: dict[str, models.Model | str | int], + data: dict[str, str], + ) -> None: + key = self._make_key(model, filters) + self._execute_redis_operation(key, RedisOperation.HASH_ADD_BULK, data) + def get_hash( self, model: type[models.Model], field: dict[str, models.Model | str | int] ) -> dict[str, str]: @@ -311,6 +323,12 @@ def get_hash( return decoded_hash + def get_hash_length( + self, model: type[models.Model], field: dict[str, models.Model | str | int] + ) -> int: + key = self._make_key(model, field) + return self._execute_redis_operation(key, RedisOperation.HASH_LENGTH) + def process_batch(self) -> None: try: redis_buffer_registry.callback(BufferHookEvent.FLUSH) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index cb92d5b4281b6..d0a5b5a4af874 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2636,3 +2636,8 @@ default=1, flags=FLAG_AUTOMATOR_MODIFIABLE, ) +register( + "delayed_processing.batch_size", + default=10000, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index 68248e037783f..41f69e7557214 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -4,13 +4,15 @@ from collections import defaultdict from collections.abc import Sequence from datetime import datetime, timedelta, timezone +from itertools 
import islice from typing import Any, DefaultDict, NamedTuple import sentry_sdk from django.db.models import OuterRef, Subquery -from sentry import buffer, nodestore +from sentry import buffer, nodestore, options from sentry.buffer.redis import BufferHookEvent, redis_buffer_registry +from sentry.db import models from sentry.eventstore.models import Event, GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.group import Group @@ -85,8 +87,15 @@ def fetch_project(project_id: int) -> Project | None: return None -def fetch_rulegroup_to_event_data(project_id: int) -> dict[str, str]: - return buffer.backend.get_hash(model=Project, field={"project_id": project_id}) +def fetch_rulegroup_to_event_data(project_id: int, batch_key: str | None = None) -> dict[str, str]: + field: dict[str, models.Model | int | str] = { + "project_id": project_id, + } + + if batch_key: + field["batch_key"] = batch_key + + return buffer.backend.get_hash(model=Project, field=field) def get_rules_to_groups(rulegroup_to_event_data: dict[str, str]) -> DefaultDict[int, set[int]]: @@ -447,13 +456,17 @@ def fire_rules( safe_execute(callback, groupevent, futures) -def cleanup_redis_buffer(project_id: int, rules_to_groups: DefaultDict[int, set[int]]) -> None: +def cleanup_redis_buffer( + project_id: int, rules_to_groups: DefaultDict[int, set[int]], batch_key: str | None +) -> None: hashes_to_delete = [ f"{rule}:{group}" for rule, groups in rules_to_groups.items() for group in groups ] - buffer.backend.delete_hash( - model=Project, filters={"project_id": project_id}, fields=hashes_to_delete - ) + filters: dict[str, models.Model | str | int] = {"project_id": project_id} + if batch_key: + filters["batch_key"] = batch_key + + buffer.backend.delete_hash(model=Project, filters=filters, fields=hashes_to_delete) def bucket_num_groups(num_groups: int) -> str: @@ -463,6 +476,55 @@ def bucket_num_groups(num_groups: int) -> str: return "1" +def 
process_rulegroups_in_batches(project_id: int): + """ + This will check the number of rulegroup_to_event_data items in the Redis buffer for a project. + + If the number is larger than the batch size, it will chunk the items and process them in batches. + + The batches are replicated into a new redis hash with a unique filter (a uuid) to identify the batch. + We need to use a UUID because these batches can be created in multiple processes and we need to ensure + uniqueness across all of them for the centralized redis buffer. The batches are stored in redis because + we shouldn't pass objects that need to be pickled and 10k items could be problematic in the celery tasks + as arguments could be problematic. Finally, we can't use a pagination system on the data because + redis doesn't maintain the sort order of the hash keys. + + `apply_delayed` will fetch the batch from redis and process the rules. + """ + batch_size = options.get("delayed_processing.batch_size") + event_count = buffer.backend.get_hash_length(Project, {"project_id": project_id}) + + if event_count < batch_size: + return apply_delayed.delayed(project_id) + + logger.info( + "delayed_processing.process_large_batch", + extra={"project_id": project_id, "count": event_count}, + ) + + # if the dictionary is large, get the items and chunk them. 
+ rulegroup_to_event_data = fetch_rulegroup_to_event_data(project_id) + + with metrics.timer("delayed_processing.process_batch.duration"): + items = iter(rulegroup_to_event_data.items()) + + while batch := dict(islice(items, batch_size)): + batch_key = str(uuid.uuid4()) + + buffer.backend.push_to_hash_bulk( + model=Project, + filters={"project_id": project_id, "batch_key": batch_key}, + data=batch, + ) + + # remove the batched items from the project rulegroup_to_event_data + buffer.backend.delete_hash( + model=Project, filters={"project_id": project_id}, fields=list(batch.keys()) + ) + + apply_delayed.delayed(project_id, batch_key) + + def process_delayed_alert_conditions() -> None: with metrics.timer("delayed_processing.process_all_conditions.duration"): fetch_time = datetime.now(tz=timezone.utc) @@ -473,7 +535,7 @@ def process_delayed_alert_conditions() -> None: logger.info("delayed_processing.project_id_list", extra={"project_ids": log_str}) for project_id, _ in project_ids: - apply_delayed.delay(project_id) + process_rulegroups_in_batches(project_id) buffer.backend.delete_key(PROJECT_ID_BUFFER_LIST_KEY, min=0, max=fetch_time.timestamp()) @@ -487,32 +549,15 @@ def process_delayed_alert_conditions() -> None: time_limit=60, silo_mode=SiloMode.REGION, ) -def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: +def apply_delayed(project_id: int, batch_key: str | None = None, *args: Any, **kwargs: Any) -> None: """ Grab rules, groups, and events from the Redis buffer, evaluate the "slow" conditions in a bulk snuba query, and fire them if they pass """ project = fetch_project(project_id) if not project: - # Should we remove the project_id from the redis queue? 
- return - - rulegroup_to_event_data = fetch_rulegroup_to_event_data(project_id) - num_groups = len(rulegroup_to_event_data) - num_groups_bucketed = bucket_num_groups(num_groups) - metrics.incr("delayed_processing.num_groups", tags={"num_groups": num_groups_bucketed}) - - if num_groups >= 10000: - logger.error( - "delayed_processing.too_many_groups", - extra={ - "project_id": project_id, - "num_groups": num_groups, - "organization_id": project.organization_id, - }, - ) - # TODO @saponifi3d - Split the processing from here into smaller groups return + rulegroup_to_event_data = fetch_rulegroup_to_event_data(project_id, batch_key) rules_to_groups = get_rules_to_groups(rulegroup_to_event_data) alert_rules = fetch_alert_rules(list(rules_to_groups.keys())) condition_groups = get_condition_query_groups(alert_rules, rules_to_groups) @@ -542,7 +587,7 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: with metrics.timer("delayed_processing.fire_rules.duration"): fire_rules(rules_to_fire, parsed_rulegroup_to_event_data, alert_rules, project) - cleanup_redis_buffer(project_id, rules_to_groups) + cleanup_redis_buffer(project_id, rules_to_groups, batch_key) if not redis_buffer_registry.has(BufferHookEvent.FLUSH): diff --git a/tests/sentry/buffer/test_base.py b/tests/sentry/buffer/test_base.py index 5f216d8bc2a5a..c0cd0d241ab9b 100644 --- a/tests/sentry/buffer/test_base.py +++ b/tests/sentry/buffer/test_base.py @@ -2,6 +2,7 @@ from unittest import mock from django.utils import timezone +from pytest import raises from sentry.buffer.base import Buffer from sentry.db import models @@ -77,3 +78,9 @@ def test_signal_only(self, create_or_update): self.buf.process(Group, columns, filters, {"last_seen": the_date}, signal_only=True) group.refresh_from_db() assert group.times_seen == prev_times_seen + + def test_push_to_hash_bulk(self): + raises(NotImplementedError, self.buf.push_to_hash_bulk, Group, {"id": 1}, {"foo": "bar"}) + + def test_get_hash_length(self): + 
raises(NotImplementedError, self.buf.get_hash_length, Group, {"id": 1}) diff --git a/tests/sentry/buffer/test_redis.py b/tests/sentry/buffer/test_redis.py index d2da538de87e9..c725ec4e78e16 100644 --- a/tests/sentry/buffer/test_redis.py +++ b/tests/sentry/buffer/test_redis.py @@ -1,6 +1,7 @@ import datetime import pickle from collections import defaultdict +from collections.abc import Mapping from unittest import mock from unittest.mock import Mock @@ -367,6 +368,36 @@ def test_process_uses_signal_only(self, process): self.buf.process("foo") process.assert_called_once_with(mock.Mock, {"times_seen": 1}, {"pk": 1}, {}, True) + @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo")) + def test_get_hash_length(self): + client = get_cluster_routing_client(self.buf.cluster, self.buf.is_redis_cluster) + data: Mapping[str | bytes, bytes | float | int | str] = { + "f": '{"pk": ["i","1"]}', + "i+times_seen": "1", + "m": "unittest.mock.Mock", + "s": "1", + } + + client.hmset("foo", data) + buffer_length = self.buf.get_hash_length("foo", field={"bar": 1}) + assert buffer_length == len(data) + + @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo")) + def test_push_to_hash_bulk(self): + def decode_dict(d): + return {k: v.decode("utf-8") if isinstance(v, bytes) else v for k, v in d.items()} + + client = get_cluster_routing_client(self.buf.cluster, self.buf.is_redis_cluster) + data = { + "f": '{"pk": ["i","1"]}', + "i+times_seen": "1", + "m": "unittest.mock.Mock", + "s": "1", + } + self.buf.push_to_hash_bulk(model=Project, filters={"project_id": 1}, data=data) + result = _hgetall_decode_keys(client, "foo", self.buf.is_redis_cluster) + assert decode_dict(result) == data + # @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo")) # def test_incr_uses_signal_only(self): diff --git a/tests/sentry/rules/processing/test_delayed_processing.py 
b/tests/sentry/rules/processing/test_delayed_processing.py index 2330115404f12..c049ecf6040e5 100644 --- a/tests/sentry/rules/processing/test_delayed_processing.py +++ b/tests/sentry/rules/processing/test_delayed_processing.py @@ -25,6 +25,7 @@ apply_delayed, bucket_num_groups, bulk_fetch_events, + cleanup_redis_buffer, generate_unique_queries, get_condition_group_results, get_condition_query_groups, @@ -34,11 +35,13 @@ get_slow_conditions, parse_rulegroup_to_event_data, process_delayed_alert_conditions, + process_rulegroups_in_batches, ) from sentry.rules.processing.processor import PROJECT_ID_BUFFER_LIST_KEY from sentry.testutils.cases import PerformanceIssueTestCase, RuleTestCase, TestCase from sentry.testutils.factories import EventType from sentry.testutils.helpers.datetime import before_now, freeze_time, iso_format +from sentry.testutils.helpers.options import override_options from sentry.testutils.helpers.redis import mock_redis_buffer from sentry.utils import json from sentry.utils.safe import safe_execute @@ -83,6 +86,23 @@ def _callthrough_with_order(*args, **kwargs): @freeze_time(FROZEN_TIME) class CreateEventTestCase(TestCase, BaseEventFrequencyPercentTest): + def setUp(self): + super().setUp() + self.mock_redis_buffer = mock_redis_buffer() + self.mock_redis_buffer.__enter__() + + def tearDown(self): + self.mock_redis_buffer.__exit__(None, None, None) + + def push_to_hash(self, project_id, rule_id, group_id, event_id=None, occurrence_id=None): + value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id}) + buffer.backend.push_to_hash( + model=Project, + filters={"project_id": project_id}, + field=f"{rule_id}:{group_id}", + value=value, + ) + def create_event( self, project_id: int, @@ -643,23 +663,12 @@ def test_parse_rulegroup_invalid_json(self): class ProcessDelayedAlertConditionsTest(CreateEventTestCase, PerformanceIssueTestCase): buffer_timestamp = (FROZEN_TIME + timedelta(seconds=1)).timestamp() - def push_to_hash(self, project_id, 
rule_id, group_id, event_id=None, occurrence_id=None): - value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id}) - buffer.backend.push_to_hash( - model=Project, - filters={"project_id": project_id}, - field=f"{rule_id}:{group_id}", - value=value, - ) - def assert_buffer_cleared(self, project_id): rule_group_data = buffer.backend.get_hash(Project, {"project_id": project_id}) assert rule_group_data == {} def setUp(self): super().setUp() - self.mock_redis_buffer = mock_redis_buffer() - self.mock_redis_buffer.__enter__() self.tag_filter = { "id": "sentry.rules.filters.tagged_event.TaggedEventFilter", @@ -747,11 +756,8 @@ def _push_base_events(self) -> None: self.push_to_hash(self.project_two.id, self.rule3.id, self.group3.id, self.event3.event_id) self.push_to_hash(self.project_two.id, self.rule4.id, self.group4.id, self.event4.event_id) - def tearDown(self): - self.mock_redis_buffer.__exit__(None, None, None) - - @patch("sentry.rules.processing.delayed_processing.apply_delayed") - def test_fetches_from_buffer_and_executes(self, mock_apply_delayed): + @patch("sentry.rules.processing.delayed_processing.process_rulegroups_in_batches") + def test_fetches_from_buffer_and_executes(self, mock_process_in_batches): self._push_base_events() # To get the correct mapping, we need to return the correct # rulegroup_event mapping based on the project_id input @@ -761,7 +767,7 @@ def test_fetches_from_buffer_and_executes(self, mock_apply_delayed): (self.project, self.rulegroup_event_mapping_one), (self.project_two, self.rulegroup_event_mapping_two), ): - assert mock_apply_delayed.delay.call_count == 2 + assert mock_process_in_batches.call_count == 2 project_ids = buffer.backend.get_sorted_set( PROJECT_ID_BUFFER_LIST_KEY, 0, self.buffer_timestamp @@ -1326,6 +1332,58 @@ def test_apply_delayed_process_count_then_percent(self, safe_execute_callthrough self._assert_count_percent_results(safe_execute_callthrough) +class 
ProcessRuleGroupsInBatchesTest(CreateEventTestCase): + def setUp(self): + super().setUp() + + self.project = self.create_project() + self.group = self.create_group(self.project) + self.group_two = self.create_group(self.project) + self.group_three = self.create_group(self.project) + self.rule = self.create_alert_rule() + + @patch("sentry.rules.processing.delayed_processing.apply_delayed") + def test_no_redis_data(self, mock_apply_delayed): + process_rulegroups_in_batches(self.project.id) + mock_apply_delayed.delayed.assert_called_once_with(self.project.id) + + @patch("sentry.rules.processing.delayed_processing.apply_delayed") + def test_basic(self, mock_apply_delayed): + self.push_to_hash(self.project.id, self.rule.id, self.group.id) + self.push_to_hash(self.project.id, self.rule.id, self.group_two.id) + self.push_to_hash(self.project.id, self.rule.id, self.group_three.id) + + process_rulegroups_in_batches(self.project.id) + mock_apply_delayed.delayed.assert_called_once_with(self.project.id) + + @override_options({"delayed_processing.batch_size": 2}) + @patch("sentry.rules.processing.delayed_processing.apply_delayed") + def test_batch(self, mock_apply_delayed): + mock_delayed = mock_apply_delayed.delayed + self.push_to_hash(self.project.id, self.rule.id, self.group.id) + self.push_to_hash(self.project.id, self.rule.id, self.group_two.id) + self.push_to_hash(self.project.id, self.rule.id, self.group_three.id) + + process_rulegroups_in_batches(self.project.id) + assert mock_delayed.call_count == 2 + + # Validate the batches are created correctly + batch_one_key = mock_delayed.call_args_list[0][0][1] + batch_one = buffer.backend.get_hash( + model=Project, field={"project_id": self.project.id, "batch_key": batch_one_key} + ) + batch_two_key = mock_delayed.call_args_list[1][0][1] + batch_two = buffer.backend.get_hash( + model=Project, field={"project_id": self.project.id, "batch_key": batch_two_key} + ) + + assert len(batch_one) == 2 + assert len(batch_two) == 1 + + # 
Validate that we've cleared the original data to reduce storage usage + assert not buffer.backend.get_hash(model=Project, field={"project_id": self.project.id}) + + class UniqueConditionQueryTest(TestCase): """ Tests for the UniqueConditionQuery class. Currently, this is just to pass codecov. @@ -1352,3 +1410,60 @@ def test_repr(self): repr(condition) == "" ) + + +class CleanupRedisBufferTest(CreateEventTestCase): + def setUp(self): + super().setUp() + + self.project = self.create_project() + self.group = self.create_group(self.project) + self.rule = self.create_alert_rule() + + def test_cleanup_redis(self): + self.push_to_hash(self.project.id, self.rule.id, self.group.id) + rules_to_groups: defaultdict[int, set[int]] = defaultdict(set) + rules_to_groups[self.rule.id].add(self.group.id) + + cleanup_redis_buffer(self.project.id, rules_to_groups, None) + rule_group_data = buffer.backend.get_hash(Project, {"project_id": self.project.id}) + assert rule_group_data == {} + + @override_options({"delayed_processing.batch_size": 2}) + @patch("sentry.rules.processing.delayed_processing.apply_delayed") + def test_batched_cleanup(self, mock_apply_delayed): + group_two = self.create_group(self.project) + group_three = self.create_group(self.project) + + self.push_to_hash(self.project.id, self.rule.id, self.group.id) + self.push_to_hash(self.project.id, self.rule.id, group_two.id) + self.push_to_hash(self.project.id, self.rule.id, group_three.id) + + rules_to_groups: defaultdict[int, set[int]] = defaultdict(set) + rules_to_groups[self.rule.id].add(self.group.id) + rules_to_groups[self.rule.id].add(group_two.id) + rules_to_groups[self.rule.id].add(group_three.id) + + process_rulegroups_in_batches(self.project.id) + batch_one_key = mock_apply_delayed.delayed.call_args_list[0][0][1] + batch_two_key = mock_apply_delayed.delayed.call_args_list[1][0][1] + + # Verify process_rulegroups_in_batches removed the data from the buffer + rule_group_data = buffer.backend.get_hash(Project, 
{"project_id": self.project.id}) + assert rule_group_data == {} + + cleanup_redis_buffer(self.project.id, rules_to_groups, batch_one_key) + + # Verify the batch we "executed" is removed + rule_group_data = buffer.backend.get_hash( + Project, {"project_id": self.project.id, "batch_key": batch_one_key} + ) + assert rule_group_data == {} + + # Verify the batch we didn't execute is still in redis + rule_group_data = buffer.backend.get_hash( + Project, {"project_id": self.project.id, "batch_key": batch_two_key} + ) + assert rule_group_data == { + f"{self.rule.id}:{group_three.id}": '{"event_id":null,"occurrence_id":null}', + } From 1b30ecfde55dded95bc76145dedeba431211557c Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Wed, 31 Jul 2024 17:06:59 -0700 Subject: [PATCH 44/52] feat(custom-views): Support renaming tab titles (#75385) This PR adds support for the Rename button that appears in a tab menu. Some implementation specific logic: - Pressing escape discards your current change - Pressing enter confirms your current change - Clicking away from the tab is effectively the same as pressing enter - Empty strings are not allowed. 
Attempting to save one will revert back to the original value https://github.com/user-attachments/assets/ef211987-a3b4-4f34-9318-1c548a416c51 --- .../app/views/issueList/draggableTabBar.tsx | 34 ++++++-- .../app/views/issueList/editableTabTitle.tsx | 87 +++++++++++++++++++ 2 files changed, 112 insertions(+), 9 deletions(-) create mode 100644 static/app/views/issueList/editableTabTitle.tsx diff --git a/static/app/views/issueList/draggableTabBar.tsx b/static/app/views/issueList/draggableTabBar.tsx index 02db5d2465963..edbb48c7ed33f 100644 --- a/static/app/views/issueList/draggableTabBar.tsx +++ b/static/app/views/issueList/draggableTabBar.tsx @@ -14,6 +14,7 @@ import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {defined} from 'sentry/utils'; import {DraggableTabMenuButton} from 'sentry/views/issueList/draggableTabMenuButton'; +import EditableTabTitle from 'sentry/views/issueList/editableTabTitle'; export interface Tab { content: React.ReactNode; @@ -50,11 +51,6 @@ export interface DraggableTabBarProps { * Note: The `Duplicate` button only appears for persistent views */ onDuplicate?: (key: MenuItemProps['key']) => void; - /** - * Callback function to be called when user clicks the 'Rename' button. - * Note: The `Rename` button only appears for persistent views - */ - onRename?: (key: MenuItemProps['key']) => void; /** * Callback function to be called when user clicks the 'Save' button. * Note: The `Save` button only appears for persistent views when `isChanged=true` @@ -64,6 +60,11 @@ export interface DraggableTabBarProps { * Callback function to be called when user clicks the 'Save View' button for temporary views. */ onSaveTempView?: () => void; + /** + * Callback function to be called when user renames a tab. 
+ * Note: The `Rename` button only appears for persistent views + */ + onTabRenamed?: (key: MenuItemProps['key'], newLabel: string) => void; tempTabContent?: React.ReactNode; tempTabLabel?: string; } @@ -78,12 +79,14 @@ export function DraggableTabBar({ onDelete, onDiscard, onDuplicate, - onRename, + onTabRenamed, onSave, onDiscardTempView, onSaveTempView, }: DraggableTabBarProps) { const [selectedTabKey, setSelectedTabKey] = useState(tabs[0].key); + // TODO: Extract this to a separate component encompassing Tab.Item in the future + const [editingTabKey, setEditingTabKey] = useState(null); useEffect(() => { if (!showTempTab && selectedTabKey === 'temporary-tab') { @@ -131,6 +134,14 @@ export function DraggableTabBar({ onAddView?.(e); }; + const handleOnTabRenamed = (newLabel: string, tabKey: string) => { + const tab = tabs.find(tb => tb.key === tabKey); + if (tab && newLabel !== tab.label) { + setTabs(tabs.map(tb => (tb.key === tab.key ? {...tb, label: newLabel} : tb))); + onTabRenamed?.(tab.key, newLabel); + } + }; + const makeMenuOptions = (tab: Tab): MenuItemProps[] => { if (tab.key === 'temporary-tab') { return makeTempViewMenuOptions({ @@ -140,7 +151,7 @@ export function DraggableTabBar({ } if (tab.hasUnsavedChanges) { return makeUnsavedChangesMenuOptions({ - onRename, + onRename: () => setEditingTabKey(tab.key), onDuplicate: () => handleOnDuplicate(tab), onDelete: tabs.length > 1 ? () => handleOnDelete(tab) : undefined, onSave, @@ -148,7 +159,7 @@ export function DraggableTabBar({ }); } return makeDefaultMenuOptions({ - onRename, + onRename: () => setEditingTabKey(tab.key), onDuplicate: () => handleOnDuplicate(tab), onDelete: tabs.length > 1 ? () => handleOnDelete(tab) : undefined, }); @@ -171,7 +182,12 @@ export function DraggableTabBar({ hidden={tab.key === 'temporary-tab' && !showTempTab} > - {tab.label} + setEditingTabKey(isEditing ? 
tab.key : null)} + onChange={newLabel => handleOnTabRenamed(newLabel.trim(), tab.key)} + /> {tab.key !== 'temporary-tab' && tab.queryCount && ( diff --git a/static/app/views/issueList/editableTabTitle.tsx b/static/app/views/issueList/editableTabTitle.tsx new file mode 100644 index 0000000000000..0a1fdfd90e595 --- /dev/null +++ b/static/app/views/issueList/editableTabTitle.tsx @@ -0,0 +1,87 @@ +import {useEffect, useRef, useState} from 'react'; +import styled from '@emotion/styled'; + +function EditableTabTitle({ + label, + onChange, + isEditing, + setIsEditing, +}: { + isEditing: boolean; + label: string; + onChange: (newLabel: string) => void; + setIsEditing: (isEditing: boolean) => void; +}) { + const [inputValue, setInputValue] = useState(label); + + const inputRef = useRef(null); + + const isEmpty = !inputValue.trim(); + + const handleOnBlur = () => { + if (!isEditing) { + return; + } + if (isEmpty) { + setInputValue(label); + return; + } + if (inputValue !== label) { + onChange(inputValue); + } + + setIsEditing(false); + }; + + const handleOnKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + handleOnBlur(); + } + if (e.key === 'Escape') { + setInputValue(label); + setIsEditing(false); + } + }; + + useEffect(() => { + setTimeout(() => { + inputRef?.current?.focus(); + }, 0); + }, [isEditing, inputRef]); + + const handleOnChange = (e: React.ChangeEvent) => { + setInputValue(e.target.value); + }; + + return isEditing ? ( + 1 ? 
inputValue.length - 1 : 1} + /> + ) : ( + label + ); +} + +export default EditableTabTitle; + +const StyledInput = styled('input')` + border: none !important; + width: fit-content; + background: transparent; + outline: none; + height: auto; + padding: 0; + font-size: inherit; + &, + &:focus, + &:active, + &:hover { + box-shadow: none; + } +`; From 8eb0584ea1e1e7d7e1e2e5bdcc5d55fe47873b14 Mon Sep 17 00:00:00 2001 From: Abdullah Khan <60121741+Abdkhan14@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:34:44 -0400 Subject: [PATCH 45/52] fix(new-trace): Displaying quota exceeded banner for am3 orgs. (#75410) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Banner wasn't rendering for am3 orgs, threw an error as we tried reading category `transactions`: Screenshot 2024-07-31 at 8 20 58 PM Trace for testing: [link](https://guisoft-lda.dev.getsentry.net:7999/performance/trace/dc43e4c42d9a4e819cdcfde38062369e/) Feature flag: [link](https://github.com/getsentry/sentry/blob/master/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx#L163) --------- Co-authored-by: Abdullah Khan --- .../traceTypeWarnings/errorsOnlyWarnings.tsx | 44 +++++++++++++------ ...Stats.tsx => usePerformanceUsageStats.tsx} | 12 ++--- 2 files changed, 36 insertions(+), 20 deletions(-) rename static/app/views/performance/newTraceDetails/traceTypeWarnings/{useTransactionUsageStats.tsx => usePerformanceUsageStats.tsx} (83%) diff --git a/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx b/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx index 9f39eedc86b0b..b04f2dfc0f7e4 100644 --- a/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx +++ b/static/app/views/performance/newTraceDetails/traceTypeWarnings/errorsOnlyWarnings.tsx @@ -22,7 +22,7 @@ import type {TraceTree} from '../traceModels/traceTree'; import {TraceType} from 
'../traceType'; import {TraceWarningComponents} from './styles'; -import {useTransactionUsageStats} from './useTransactionUsageStats'; +import {usePerformanceUsageStats} from './usePerformanceUsageStats'; type ErrorOnlyWarningsProps = { organization: Organization; @@ -127,19 +127,27 @@ function PerformanceSetupBanner({ } type Subscription = { - categories: { - transactions: { - usageExceeded: boolean; - }; - }; + categories: + | { + transactions: { + usageExceeded: boolean; + }; + } + | { + spans: { + usageExceeded: boolean; + }; + }; planDetails: { billingInterval: 'monthly' | 'annual'; - hasOnDemandModes: boolean; + }; + onDemandBudgets?: { + enabled: boolean; }; }; function PerformanceQuotaExceededWarning(props: ErrorOnlyWarningsProps) { - const {data: transactionUsageStats} = useTransactionUsageStats({ + const {data: performanceUsageStats} = usePerformanceUsageStats({ organization: props.organization, tree: props.tree, }); @@ -152,11 +160,19 @@ function PerformanceQuotaExceededWarning(props: ErrorOnlyWarningsProps) { ); // Check if events were dropped due to exceeding the transaction quota, around when the trace occurred. - const droppedTransactionsCount = transactionUsageStats?.totals['sum(quantity)'] || 0; + const droppedTransactionsCount = performanceUsageStats?.totals['sum(quantity)'] || 0; // Check if the organization still has transaction quota maxed out. 
- const hasExceededTransactionLimit = - subscription?.categories.transactions.usageExceeded || false; + const dataCategories = subscription?.categories; + let hasExceededTransactionLimit = false; + + if (dataCategories) { + if ('transactions' in dataCategories) { + hasExceededTransactionLimit = dataCategories.transactions.usageExceeded || false; + } else if ('spans' in dataCategories) { + hasExceededTransactionLimit = dataCategories.spans.usageExceeded || false; + } + } const hideBanner = droppedTransactionsCount === 0 || @@ -177,12 +193,12 @@ function PerformanceQuotaExceededWarning(props: ErrorOnlyWarningsProps) { const title = tct("You've exceeded your [billingInterval] [billingType]", { billingInterval: subscription?.planDetails.billingInterval ?? 'monthly', - billingType: subscription?.planDetails.hasOnDemandModes + billingType: subscription?.onDemandBudgets?.enabled ? t('pay-as-you-go budget') : t('quota'), }); - const ctaText = subscription?.planDetails?.hasOnDemandModes + const ctaText = subscription?.onDemandBudgets?.enabled ? t('Increase Budget') : t('Increase Volumes'); @@ -196,7 +212,7 @@ function PerformanceQuotaExceededWarning(props: ErrorOnlyWarningsProps) { description={tct( 'Spans are being dropped and monitoring is impacted. To start seeing traces with spans, increase your [billingType].', { - billingType: subscription?.planDetails?.hasOnDemandModes + billingType: subscription?.onDemandBudgets?.enabled ? 
t('budget') : t('quota'), } diff --git a/static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx b/static/app/views/performance/newTraceDetails/traceTypeWarnings/usePerformanceUsageStats.tsx similarity index 83% rename from static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx rename to static/app/views/performance/newTraceDetails/traceTypeWarnings/usePerformanceUsageStats.tsx index b5d02ba23af86..b9966c273b9ee 100644 --- a/static/app/views/performance/newTraceDetails/traceTypeWarnings/useTransactionUsageStats.tsx +++ b/static/app/views/performance/newTraceDetails/traceTypeWarnings/usePerformanceUsageStats.tsx @@ -6,9 +6,9 @@ import type {TraceTree} from '../traceModels/traceTree'; // 1 hour in milliseconds const ONE_HOUR = 60 * 60 * 1000; -export type TransactionStatsGroup = { +export type PerformanceStatsGroup = { by: { - reason: 'transaction_usage_exceeded'; + reason: string; }; totals: { 'sum(quantity)': number; @@ -16,10 +16,10 @@ export type TransactionStatsGroup = { }; type PartialUsageStats = { - groups: TransactionStatsGroup[]; + groups: PerformanceStatsGroup[]; }; -export function useTransactionUsageStats({ +export function usePerformanceUsageStats({ organization, tree, }: { @@ -60,8 +60,8 @@ export function useTransactionUsageStats({ return { ...results, - data: results.data?.groups.find( - group => group.by.reason === 'transaction_usage_exceeded' + data: results.data?.groups.find(group => + ['transaction_usage_exceeded', 'span_usage_exceeded'].includes(group.by.reason) ), }; } From 6acce9c20d2932955e615560d1620c77fa465707 Mon Sep 17 00:00:00 2001 From: joshuarli Date: Wed, 31 Jul 2024 18:40:25 -0700 Subject: [PATCH 46/52] feat(devenv): start colima more robustly (#74975) --- requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- scripts/start-colima.py | 51 ----------------------- scripts/use-colima.sh | 2 +- src/sentry/runner/commands/devservices.py | 6 +-- 5 files 
changed, 6 insertions(+), 57 deletions(-) delete mode 100644 scripts/start-colima.py diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 44c20e91c8d02..c2f07bd66afaa 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -181,7 +181,7 @@ s3transfer==0.10.0 selenium==4.16.0 sentry-arroyo==2.16.5 sentry-cli==2.16.0 -sentry-devenv==1.7.0 +sentry-devenv==1.8.0 sentry-forked-django-stubs==5.0.4.post1 sentry-forked-djangorestframework-stubs==3.15.0.post1 sentry-kafka-schemas==0.1.102 diff --git a/requirements-dev.txt b/requirements-dev.txt index 51f99bf9ef86a..4bdd3f0473839 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,6 @@ --index-url https://pypi.devinfra.sentry.io/simple -sentry-devenv>=1.7.0 +sentry-devenv>=1.8.0 covdefaults>=2.3.0 docker>=6 diff --git a/scripts/start-colima.py b/scripts/start-colima.py deleted file mode 100644 index 9a4be3e1581a5..0000000000000 --- a/scripts/start-colima.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import os -import platform -import subprocess -from collections.abc import Sequence - - -def main(argv: Sequence[str] | None = None) -> int: - if not os.getenv("CI"): - macos_version = platform.mac_ver()[0] - macos_major_version = int(macos_version.split(".")[0]) - if macos_major_version < 14: - raise SystemExit(f"macos >= 14 is required to use colima, found {macos_version}") - - cpus = os.cpu_count() - if cpus is None: - raise SystemExit("failed to determine cpu count") - - # SC_PAGE_SIZE is POSIX 2008 - # SC_PHYS_PAGES is a linux addition but also supported by more recent MacOS versions - SC_PAGE_SIZE = os.sysconf("SC_PAGE_SIZE") - SC_PHYS_PAGES = os.sysconf("SC_PHYS_PAGES") - if SC_PAGE_SIZE == -1 or SC_PHYS_PAGES == -1: - raise SystemExit("failed to determine memsize_bytes") - memsize_bytes = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES") - - args = [ - "--cpu", - f"{cpus//2}", - "--memory", - 
f"{memsize_bytes//(2*1024**3)}", - ] - if platform.machine() == "arm64": - args = [*args, "--vm-type=vz", "--vz-rosetta", "--mount-type=virtiofs"] - HOME = os.path.expanduser("~") - rc = subprocess.call( - ( - "colima", - "start", - f"--mount=/var/folders:w,/private/tmp/colima:w,{HOME}:r", - *args, - ) - ) - if rc != 0: - return rc - return subprocess.call(("docker", "context", "use", "colima")) - - -if __name__ == "__main__": - raise SystemExit(main()) diff --git a/scripts/use-colima.sh b/scripts/use-colima.sh index b49700b0adfba..375f9defd3d63 100755 --- a/scripts/use-colima.sh +++ b/scripts/use-colima.sh @@ -55,7 +55,7 @@ with open(os.path.expanduser("~/.docker/config.json"), "w") as f: EOF echo "Starting colima." -python3 -uS scripts/start-colima.py +devenv colima start echo "Recreating your postgres volume for use with colima. May take a few minutes." docker volume create --name sentry_postgres diff --git a/src/sentry/runner/commands/devservices.py b/src/sentry/runner/commands/devservices.py index f7d442ef27974..2fcc274234c9f 100644 --- a/src/sentry/runner/commands/devservices.py +++ b/src/sentry/runner/commands/devservices.py @@ -67,9 +67,9 @@ def _client() -> ContextManager[docker.DockerClient]: click.echo("Attempting to start colima...") subprocess.check_call( ( - "python3", - "-uS", - f"{os.path.dirname(__file__)}/../../../../scripts/start-colima.py", + "devenv", + "colima", + "start", ) ) elif USE_DOCKER_DESKTOP: From efe481e8737ed880b5c3b01c35b81052f7ac7697 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 1 Aug 2024 02:17:12 +0000 Subject: [PATCH 47/52] Revert "Process delayed alert conditions in batches of 10,000 (#75302)" This reverts commit 215491de2bcd42dbbe7c18b5bae1bcec6257b0b6. 
Co-authored-by: saponifi3d <1569818+saponifi3d@users.noreply.github.com> --- src/sentry/buffer/base.py | 14 -- src/sentry/buffer/redis.py | 18 --- src/sentry/options/defaults.py | 5 - .../rules/processing/delayed_processing.py | 101 ++++-------- tests/sentry/buffer/test_base.py | 7 - tests/sentry/buffer/test_redis.py | 31 ---- .../processing/test_delayed_processing.py | 149 ++---------------- 7 files changed, 45 insertions(+), 280 deletions(-) diff --git a/src/sentry/buffer/base.py b/src/sentry/buffer/base.py index f2ef35ad7440e..6182667947c5f 100644 --- a/src/sentry/buffer/base.py +++ b/src/sentry/buffer/base.py @@ -34,7 +34,6 @@ class Buffer(Service): "push_to_hash", "get_sorted_set", "get_hash", - "get_hash_length", "delete_hash", "delete_key", ) @@ -55,11 +54,6 @@ def get_hash( ) -> dict[str, str]: return {} - def get_hash_length( - self, model: type[models.Model], field: dict[str, models.Model | str | int] - ) -> int: - raise NotImplementedError - def get_sorted_set(self, key: str, min: float, max: float) -> list[tuple[int, datetime]]: return [] @@ -75,14 +69,6 @@ def push_to_hash( ) -> None: return None - def push_to_hash_bulk( - self, - model: type[models.Model], - filters: dict[str, models.Model | str | int], - data: dict[str, str], - ) -> None: - raise NotImplementedError - def delete_hash( self, model: type[models.Model], diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py index bce5263f42789..27c00b6d77887 100644 --- a/src/sentry/buffer/redis.py +++ b/src/sentry/buffer/redis.py @@ -81,16 +81,13 @@ def callback(self, buffer_hook_event: BufferHookEvent) -> bool: redis_buffer_registry = BufferHookRegistry() -# Note HMSET is not supported after redis 4.0.0, after updating we can use HSET directly. 
class RedisOperation(Enum): SORTED_SET_ADD = "zadd" SORTED_SET_GET_RANGE = "zrangebyscore" SORTED_SET_DELETE_RANGE = "zremrangebyscore" HASH_ADD = "hset" - HASH_ADD_BULK = "hmset" HASH_GET_ALL = "hgetall" HASH_DELETE = "hdel" - HASH_LENGTH = "hlen" class PendingBuffer: @@ -299,15 +296,6 @@ def push_to_hash( key = self._make_key(model, filters) self._execute_redis_operation(key, RedisOperation.HASH_ADD, field, value) - def push_to_hash_bulk( - self, - model: type[models.Model], - filters: dict[str, models.Model | str | int], - data: dict[str, str], - ) -> None: - key = self._make_key(model, filters) - self._execute_redis_operation(key, RedisOperation.HASH_ADD_BULK, data) - def get_hash( self, model: type[models.Model], field: dict[str, models.Model | str | int] ) -> dict[str, str]: @@ -323,12 +311,6 @@ def get_hash( return decoded_hash - def get_hash_length( - self, model: type[models.Model], field: dict[str, models.Model | str | int] - ) -> int: - key = self._make_key(model, field) - return self._execute_redis_operation(key, RedisOperation.HASH_LENGTH) - def process_batch(self) -> None: try: redis_buffer_registry.callback(BufferHookEvent.FLUSH) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index d0a5b5a4af874..cb92d5b4281b6 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2636,8 +2636,3 @@ default=1, flags=FLAG_AUTOMATOR_MODIFIABLE, ) -register( - "delayed_processing.batch_size", - default=10000, - flags=FLAG_AUTOMATOR_MODIFIABLE, -) diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index 41f69e7557214..68248e037783f 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -4,15 +4,13 @@ from collections import defaultdict from collections.abc import Sequence from datetime import datetime, timedelta, timezone -from itertools import islice from typing import Any, 
DefaultDict, NamedTuple import sentry_sdk from django.db.models import OuterRef, Subquery -from sentry import buffer, nodestore, options +from sentry import buffer, nodestore from sentry.buffer.redis import BufferHookEvent, redis_buffer_registry -from sentry.db import models from sentry.eventstore.models import Event, GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.group import Group @@ -87,15 +85,8 @@ def fetch_project(project_id: int) -> Project | None: return None -def fetch_rulegroup_to_event_data(project_id: int, batch_key: str | None = None) -> dict[str, str]: - field: dict[str, models.Model | int | str] = { - "project_id": project_id, - } - - if batch_key: - field["batch_key"] = batch_key - - return buffer.backend.get_hash(model=Project, field=field) +def fetch_rulegroup_to_event_data(project_id: int) -> dict[str, str]: + return buffer.backend.get_hash(model=Project, field={"project_id": project_id}) def get_rules_to_groups(rulegroup_to_event_data: dict[str, str]) -> DefaultDict[int, set[int]]: @@ -456,17 +447,13 @@ def fire_rules( safe_execute(callback, groupevent, futures) -def cleanup_redis_buffer( - project_id: int, rules_to_groups: DefaultDict[int, set[int]], batch_key: str | None -) -> None: +def cleanup_redis_buffer(project_id: int, rules_to_groups: DefaultDict[int, set[int]]) -> None: hashes_to_delete = [ f"{rule}:{group}" for rule, groups in rules_to_groups.items() for group in groups ] - filters: dict[str, models.Model | str | int] = {"project_id": project_id} - if batch_key: - filters["batch_key"] = batch_key - - buffer.backend.delete_hash(model=Project, filters=filters, fields=hashes_to_delete) + buffer.backend.delete_hash( + model=Project, filters={"project_id": project_id}, fields=hashes_to_delete + ) def bucket_num_groups(num_groups: int) -> str: @@ -476,55 +463,6 @@ def bucket_num_groups(num_groups: int) -> str: return "1" -def process_rulegroups_in_batches(project_id: int): - """ - This will check the 
number of rulegroup_to_event_data items in the Redis buffer for a project. - - If the number is larger than the batch size, it will chunk the items and process them in batches. - - The batches are replicated into a new redis hash with a unique filter (a uuid) to identify the batch. - We need to use a UUID because these batches can be created in multiple processes and we need to ensure - uniqueness across all of them for the centralized redis buffer. The batches are stored in redis because - we shouldn't pass objects that need to be pickled and 10k items could be problematic in the celery tasks - as arguments could be problematic. Finally, we can't use a pagination system on the data because - redis doesn't maintain the sort order of the hash keys. - - `apply_delayed` will fetch the batch from redis and process the rules. - """ - batch_size = options.get("delayed_processing.batch_size") - event_count = buffer.backend.get_hash_length(Project, {"project_id": project_id}) - - if event_count < batch_size: - return apply_delayed.delayed(project_id) - - logger.info( - "delayed_processing.process_large_batch", - extra={"project_id": project_id, "count": event_count}, - ) - - # if the dictionary is large, get the items and chunk them. 
- rulegroup_to_event_data = fetch_rulegroup_to_event_data(project_id) - - with metrics.timer("delayed_processing.process_batch.duration"): - items = iter(rulegroup_to_event_data.items()) - - while batch := dict(islice(items, batch_size)): - batch_key = str(uuid.uuid4()) - - buffer.backend.push_to_hash_bulk( - model=Project, - filters={"project_id": project_id, "batch_key": batch_key}, - data=batch, - ) - - # remove the batched items from the project rulegroup_to_event_data - buffer.backend.delete_hash( - model=Project, filters={"project_id": project_id}, fields=list(batch.keys()) - ) - - apply_delayed.delayed(project_id, batch_key) - - def process_delayed_alert_conditions() -> None: with metrics.timer("delayed_processing.process_all_conditions.duration"): fetch_time = datetime.now(tz=timezone.utc) @@ -535,7 +473,7 @@ def process_delayed_alert_conditions() -> None: logger.info("delayed_processing.project_id_list", extra={"project_ids": log_str}) for project_id, _ in project_ids: - process_rulegroups_in_batches(project_id) + apply_delayed.delay(project_id) buffer.backend.delete_key(PROJECT_ID_BUFFER_LIST_KEY, min=0, max=fetch_time.timestamp()) @@ -549,15 +487,32 @@ def process_delayed_alert_conditions() -> None: time_limit=60, silo_mode=SiloMode.REGION, ) -def apply_delayed(project_id: int, batch_key: str | None = None, *args: Any, **kwargs: Any) -> None: +def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: """ Grab rules, groups, and events from the Redis buffer, evaluate the "slow" conditions in a bulk snuba query, and fire them if they pass """ project = fetch_project(project_id) if not project: + # Should we remove the project_id from the redis queue? 
+ return + + rulegroup_to_event_data = fetch_rulegroup_to_event_data(project_id) + num_groups = len(rulegroup_to_event_data) + num_groups_bucketed = bucket_num_groups(num_groups) + metrics.incr("delayed_processing.num_groups", tags={"num_groups": num_groups_bucketed}) + + if num_groups >= 10000: + logger.error( + "delayed_processing.too_many_groups", + extra={ + "project_id": project_id, + "num_groups": num_groups, + "organization_id": project.organization_id, + }, + ) + # TODO @saponifi3d - Split the processing from here into smaller groups return - rulegroup_to_event_data = fetch_rulegroup_to_event_data(project_id, batch_key) rules_to_groups = get_rules_to_groups(rulegroup_to_event_data) alert_rules = fetch_alert_rules(list(rules_to_groups.keys())) condition_groups = get_condition_query_groups(alert_rules, rules_to_groups) @@ -587,7 +542,7 @@ def apply_delayed(project_id: int, batch_key: str | None = None, *args: Any, **k with metrics.timer("delayed_processing.fire_rules.duration"): fire_rules(rules_to_fire, parsed_rulegroup_to_event_data, alert_rules, project) - cleanup_redis_buffer(project_id, rules_to_groups, batch_key) + cleanup_redis_buffer(project_id, rules_to_groups) if not redis_buffer_registry.has(BufferHookEvent.FLUSH): diff --git a/tests/sentry/buffer/test_base.py b/tests/sentry/buffer/test_base.py index c0cd0d241ab9b..5f216d8bc2a5a 100644 --- a/tests/sentry/buffer/test_base.py +++ b/tests/sentry/buffer/test_base.py @@ -2,7 +2,6 @@ from unittest import mock from django.utils import timezone -from pytest import raises from sentry.buffer.base import Buffer from sentry.db import models @@ -78,9 +77,3 @@ def test_signal_only(self, create_or_update): self.buf.process(Group, columns, filters, {"last_seen": the_date}, signal_only=True) group.refresh_from_db() assert group.times_seen == prev_times_seen - - def test_push_to_hash_bulk(self): - raises(NotImplementedError, self.buf.push_to_hash_bulk, Group, {"id": 1}, {"foo": "bar"}) - - def 
test_get_hash_length(self): - raises(NotImplementedError, self.buf.get_hash_length, Group, {"id": 1}) diff --git a/tests/sentry/buffer/test_redis.py b/tests/sentry/buffer/test_redis.py index c725ec4e78e16..d2da538de87e9 100644 --- a/tests/sentry/buffer/test_redis.py +++ b/tests/sentry/buffer/test_redis.py @@ -1,7 +1,6 @@ import datetime import pickle from collections import defaultdict -from collections.abc import Mapping from unittest import mock from unittest.mock import Mock @@ -368,36 +367,6 @@ def test_process_uses_signal_only(self, process): self.buf.process("foo") process.assert_called_once_with(mock.Mock, {"times_seen": 1}, {"pk": 1}, {}, True) - @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo")) - def test_get_hash_length(self): - client = get_cluster_routing_client(self.buf.cluster, self.buf.is_redis_cluster) - data: Mapping[str | bytes, bytes | float | int | str] = { - "f": '{"pk": ["i","1"]}', - "i+times_seen": "1", - "m": "unittest.mock.Mock", - "s": "1", - } - - client.hmset("foo", data) - buffer_length = self.buf.get_hash_length("foo", field={"bar": 1}) - assert buffer_length == len(data) - - @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo")) - def test_push_to_hash_bulk(self): - def decode_dict(d): - return {k: v.decode("utf-8") if isinstance(v, bytes) else v for k, v in d.items()} - - client = get_cluster_routing_client(self.buf.cluster, self.buf.is_redis_cluster) - data = { - "f": '{"pk": ["i","1"]}', - "i+times_seen": "1", - "m": "unittest.mock.Mock", - "s": "1", - } - self.buf.push_to_hash_bulk(model=Project, filters={"project_id": 1}, data=data) - result = _hgetall_decode_keys(client, "foo", self.buf.is_redis_cluster) - assert decode_dict(result) == data - # @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo")) # def test_incr_uses_signal_only(self): diff --git a/tests/sentry/rules/processing/test_delayed_processing.py 
b/tests/sentry/rules/processing/test_delayed_processing.py index c049ecf6040e5..2330115404f12 100644 --- a/tests/sentry/rules/processing/test_delayed_processing.py +++ b/tests/sentry/rules/processing/test_delayed_processing.py @@ -25,7 +25,6 @@ apply_delayed, bucket_num_groups, bulk_fetch_events, - cleanup_redis_buffer, generate_unique_queries, get_condition_group_results, get_condition_query_groups, @@ -35,13 +34,11 @@ get_slow_conditions, parse_rulegroup_to_event_data, process_delayed_alert_conditions, - process_rulegroups_in_batches, ) from sentry.rules.processing.processor import PROJECT_ID_BUFFER_LIST_KEY from sentry.testutils.cases import PerformanceIssueTestCase, RuleTestCase, TestCase from sentry.testutils.factories import EventType from sentry.testutils.helpers.datetime import before_now, freeze_time, iso_format -from sentry.testutils.helpers.options import override_options from sentry.testutils.helpers.redis import mock_redis_buffer from sentry.utils import json from sentry.utils.safe import safe_execute @@ -86,23 +83,6 @@ def _callthrough_with_order(*args, **kwargs): @freeze_time(FROZEN_TIME) class CreateEventTestCase(TestCase, BaseEventFrequencyPercentTest): - def setUp(self): - super().setUp() - self.mock_redis_buffer = mock_redis_buffer() - self.mock_redis_buffer.__enter__() - - def tearDown(self): - self.mock_redis_buffer.__exit__(None, None, None) - - def push_to_hash(self, project_id, rule_id, group_id, event_id=None, occurrence_id=None): - value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id}) - buffer.backend.push_to_hash( - model=Project, - filters={"project_id": project_id}, - field=f"{rule_id}:{group_id}", - value=value, - ) - def create_event( self, project_id: int, @@ -663,12 +643,23 @@ def test_parse_rulegroup_invalid_json(self): class ProcessDelayedAlertConditionsTest(CreateEventTestCase, PerformanceIssueTestCase): buffer_timestamp = (FROZEN_TIME + timedelta(seconds=1)).timestamp() + def push_to_hash(self, project_id, 
rule_id, group_id, event_id=None, occurrence_id=None): + value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id}) + buffer.backend.push_to_hash( + model=Project, + filters={"project_id": project_id}, + field=f"{rule_id}:{group_id}", + value=value, + ) + def assert_buffer_cleared(self, project_id): rule_group_data = buffer.backend.get_hash(Project, {"project_id": project_id}) assert rule_group_data == {} def setUp(self): super().setUp() + self.mock_redis_buffer = mock_redis_buffer() + self.mock_redis_buffer.__enter__() self.tag_filter = { "id": "sentry.rules.filters.tagged_event.TaggedEventFilter", @@ -756,8 +747,11 @@ def _push_base_events(self) -> None: self.push_to_hash(self.project_two.id, self.rule3.id, self.group3.id, self.event3.event_id) self.push_to_hash(self.project_two.id, self.rule4.id, self.group4.id, self.event4.event_id) - @patch("sentry.rules.processing.delayed_processing.process_rulegroups_in_batches") - def test_fetches_from_buffer_and_executes(self, mock_process_in_batches): + def tearDown(self): + self.mock_redis_buffer.__exit__(None, None, None) + + @patch("sentry.rules.processing.delayed_processing.apply_delayed") + def test_fetches_from_buffer_and_executes(self, mock_apply_delayed): self._push_base_events() # To get the correct mapping, we need to return the correct # rulegroup_event mapping based on the project_id input @@ -767,7 +761,7 @@ def test_fetches_from_buffer_and_executes(self, mock_process_in_batches): (self.project, self.rulegroup_event_mapping_one), (self.project_two, self.rulegroup_event_mapping_two), ): - assert mock_process_in_batches.call_count == 2 + assert mock_apply_delayed.delay.call_count == 2 project_ids = buffer.backend.get_sorted_set( PROJECT_ID_BUFFER_LIST_KEY, 0, self.buffer_timestamp @@ -1332,58 +1326,6 @@ def test_apply_delayed_process_count_then_percent(self, safe_execute_callthrough self._assert_count_percent_results(safe_execute_callthrough) -class 
ProcessRuleGroupsInBatchesTest(CreateEventTestCase): - def setUp(self): - super().setUp() - - self.project = self.create_project() - self.group = self.create_group(self.project) - self.group_two = self.create_group(self.project) - self.group_three = self.create_group(self.project) - self.rule = self.create_alert_rule() - - @patch("sentry.rules.processing.delayed_processing.apply_delayed") - def test_no_redis_data(self, mock_apply_delayed): - process_rulegroups_in_batches(self.project.id) - mock_apply_delayed.delayed.assert_called_once_with(self.project.id) - - @patch("sentry.rules.processing.delayed_processing.apply_delayed") - def test_basic(self, mock_apply_delayed): - self.push_to_hash(self.project.id, self.rule.id, self.group.id) - self.push_to_hash(self.project.id, self.rule.id, self.group_two.id) - self.push_to_hash(self.project.id, self.rule.id, self.group_three.id) - - process_rulegroups_in_batches(self.project.id) - mock_apply_delayed.delayed.assert_called_once_with(self.project.id) - - @override_options({"delayed_processing.batch_size": 2}) - @patch("sentry.rules.processing.delayed_processing.apply_delayed") - def test_batch(self, mock_apply_delayed): - mock_delayed = mock_apply_delayed.delayed - self.push_to_hash(self.project.id, self.rule.id, self.group.id) - self.push_to_hash(self.project.id, self.rule.id, self.group_two.id) - self.push_to_hash(self.project.id, self.rule.id, self.group_three.id) - - process_rulegroups_in_batches(self.project.id) - assert mock_delayed.call_count == 2 - - # Validate the batches are created correctly - batch_one_key = mock_delayed.call_args_list[0][0][1] - batch_one = buffer.backend.get_hash( - model=Project, field={"project_id": self.project.id, "batch_key": batch_one_key} - ) - batch_two_key = mock_delayed.call_args_list[1][0][1] - batch_two = buffer.backend.get_hash( - model=Project, field={"project_id": self.project.id, "batch_key": batch_two_key} - ) - - assert len(batch_one) == 2 - assert len(batch_two) == 1 - - # 
Validate that we've cleared the original data to reduce storage usage - assert not buffer.backend.get_hash(model=Project, field={"project_id": self.project.id}) - - class UniqueConditionQueryTest(TestCase): """ Tests for the UniqueConditionQuery class. Currently, this is just to pass codecov. @@ -1410,60 +1352,3 @@ def test_repr(self): repr(condition) == "" ) - - -class CleanupRedisBufferTest(CreateEventTestCase): - def setUp(self): - super().setUp() - - self.project = self.create_project() - self.group = self.create_group(self.project) - self.rule = self.create_alert_rule() - - def test_cleanup_redis(self): - self.push_to_hash(self.project.id, self.rule.id, self.group.id) - rules_to_groups: defaultdict[int, set[int]] = defaultdict(set) - rules_to_groups[self.rule.id].add(self.group.id) - - cleanup_redis_buffer(self.project.id, rules_to_groups, None) - rule_group_data = buffer.backend.get_hash(Project, {"project_id": self.project.id}) - assert rule_group_data == {} - - @override_options({"delayed_processing.batch_size": 2}) - @patch("sentry.rules.processing.delayed_processing.apply_delayed") - def test_batched_cleanup(self, mock_apply_delayed): - group_two = self.create_group(self.project) - group_three = self.create_group(self.project) - - self.push_to_hash(self.project.id, self.rule.id, self.group.id) - self.push_to_hash(self.project.id, self.rule.id, group_two.id) - self.push_to_hash(self.project.id, self.rule.id, group_three.id) - - rules_to_groups: defaultdict[int, set[int]] = defaultdict(set) - rules_to_groups[self.rule.id].add(self.group.id) - rules_to_groups[self.rule.id].add(group_two.id) - rules_to_groups[self.rule.id].add(group_three.id) - - process_rulegroups_in_batches(self.project.id) - batch_one_key = mock_apply_delayed.delayed.call_args_list[0][0][1] - batch_two_key = mock_apply_delayed.delayed.call_args_list[1][0][1] - - # Verify process_rulegroups_in_batches removed the data from the buffer - rule_group_data = buffer.backend.get_hash(Project, 
{"project_id": self.project.id}) - assert rule_group_data == {} - - cleanup_redis_buffer(self.project.id, rules_to_groups, batch_one_key) - - # Verify the batch we "executed" is removed - rule_group_data = buffer.backend.get_hash( - Project, {"project_id": self.project.id, "batch_key": batch_one_key} - ) - assert rule_group_data == {} - - # Verify the batch we didn't execute is still in redis - rule_group_data = buffer.backend.get_hash( - Project, {"project_id": self.project.id, "batch_key": batch_two_key} - ) - assert rule_group_data == { - f"{self.rule.id}:{group_three.id}": '{"event_id":null,"occurrence_id":null}', - } From 3efa8cf306b9650d8129ffeaca77601b833b1009 Mon Sep 17 00:00:00 2001 From: Zach Collins Date: Wed, 31 Jul 2024 20:12:36 -0700 Subject: [PATCH 48/52] Revert "chore(hybrid-cloud): Resubmits Pydantic v2.7 upgrade" (#75411) Reverts getsentry/sentry#75311 Prepatory if necessary, pulling in https://github.com/getsentry/sentry/pull/75386 --- pyproject.toml | 3 --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 4 +--- requirements-frozen.txt | 4 +--- src/sentry/autofix/utils.py | 2 +- src/sentry/hybridcloud/rpc/__init__.py | 21 ++++--------------- src/sentry/hybridcloud/rpc/sig.py | 6 +----- src/sentry/types/region.py | 5 ++--- src/sentry/users/services/user/serial.py | 2 +- .../test_organization_sdk_updates.py | 9 +------- ...ization_sentry_app_installation_details.py | 12 +---------- tests/sentry/test_dependencies.py | 8 +++++++ 12 files changed, 22 insertions(+), 56 deletions(-) create mode 100644 tests/sentry/test_dependencies.py diff --git a/pyproject.toml b/pyproject.toml index f409a750bad0e..93c00fb8e6c24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,9 +39,6 @@ filterwarnings = [ # pytest has not yet implemented the replacement for this yet "ignore:The --looponfail command line argument.*", - - # Temporarily disable deprecation warnings for pydantic while we upgrade it - "ignore::DeprecationWarning:pydantic.*", ] 
looponfailroots = ["src", "tests"] diff --git a/requirements-base.txt b/requirements-base.txt index 1fa5e26a651c4..b23a105c3acde 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -45,7 +45,7 @@ python-rapidjson>=1.4 psutil>=5.9.2 psycopg2-binary>=2.9.9 PyJWT>=2.4.0 -pydantic>=2.5.0 +pydantic>=1.10.17,<2 python-dateutil>=2.9.0 pymemcache python-u2flib-server>=5.0.0 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index c2f07bd66afaa..a0e9b541951a7 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -7,7 +7,6 @@ --index-url https://pypi.devinfra.sentry.io/simple amqp==5.2.0 -annotated-types==0.7.0 anyio==3.7.1 asgiref==3.7.2 attrs==23.1.0 @@ -138,8 +137,7 @@ pyasn1-modules==0.2.4 pycodestyle==2.11.0 pycountry==17.5.14 pycparser==2.21 -pydantic==2.7.4 -pydantic-core==2.18.4 +pydantic==1.10.17 pyflakes==3.2.0 pyjwt==2.4.0 pymemcache==4.0.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 668c1d5e2fae1..2b1ca6ad44806 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -7,7 +7,6 @@ --index-url https://pypi.devinfra.sentry.io/simple amqp==5.2.0 -annotated-types==0.7.0 anyio==3.7.1 asgiref==3.7.2 attrs==23.1.0 @@ -97,8 +96,7 @@ pyasn1==0.4.5 pyasn1-modules==0.2.4 pycountry==17.5.14 pycparser==2.21 -pydantic==2.7.4 -pydantic-core==2.18.4 +pydantic==1.10.17 pyjwt==2.4.0 pymemcache==4.0.0 pyparsing==3.0.9 diff --git a/src/sentry/autofix/utils.py b/src/sentry/autofix/utils.py index d100ddb7ece07..ede2a34cb2cb8 100644 --- a/src/sentry/autofix/utils.py +++ b/src/sentry/autofix/utils.py @@ -1,11 +1,11 @@ import datetime import enum +from typing import TypedDict import orjson import requests from django.conf import settings from pydantic import BaseModel -from typing_extensions import TypedDict from sentry.integrations.utils.code_mapping import get_sorted_code_mapping_configs from sentry.models.project import Project diff --git a/src/sentry/hybridcloud/rpc/__init__.py 
b/src/sentry/hybridcloud/rpc/__init__.py index 91061ecbb18a3..d644e115ef78c 100644 --- a/src/sentry/hybridcloud/rpc/__init__.py +++ b/src/sentry/hybridcloud/rpc/__init__.py @@ -11,7 +11,6 @@ import pydantic from django.db import router, transaction from django.db.models import Model -from pydantic import ConfigDict from sentry.silo.base import SiloMode from sentry.utils.env import in_test_environment @@ -44,25 +43,13 @@ def __hash__(self) -> int: class RpcModel(pydantic.BaseModel): """A serializable object that may be part of an RPC schema.""" - # TODO(Hybrid-Cloud): Remove number coercion after pydantic V2 stabilized - model_config = ConfigDict( - from_attributes=True, use_enum_values=True, coerce_numbers_to_str=True - ) - - def __setstate__(self, state: dict[Any, Any]) -> None: - """ - __setstate__ override to alleviate an unpickling issue in production with the pydantic version upgrade. - """ - state.setdefault("__pydantic_extra__", {}) - state.setdefault("__pydantic_private__", {}) - - if "__pydantic_fields_set__" not in state: - state["__pydantic_fields_set__"] = state.get("__fields_set__") - super().__setstate__(state) + class Config: + orm_mode = True + use_enum_values = True @classmethod def get_field_names(cls) -> Iterable[str]: - return iter(cls.model_fields.keys()) + return iter(cls.__fields__.keys()) @classmethod def serialize_by_field_name( diff --git a/src/sentry/hybridcloud/rpc/sig.py b/src/sentry/hybridcloud/rpc/sig.py index 0331be3f85d55..7782d5693cd6f 100644 --- a/src/sentry/hybridcloud/rpc/sig.py +++ b/src/sentry/hybridcloud/rpc/sig.py @@ -7,7 +7,6 @@ import pydantic from django.utils.functional import LazyObject -from pydantic import ConfigDict from sentry.hybridcloud.rpc import ArgumentDict @@ -82,10 +81,7 @@ def create_field(param: inspect.Parameter) -> tuple[Any, Any]: if self.is_instance_method: parameters = parameters[1:] # exclude `self` argument field_definitions = {p.name: create_field(p) for p in parameters} - - # TODO(Hybrid-Cloud): 
Remove number coercion after pydantic V2 stabilized - config = ConfigDict(coerce_numbers_to_str=True) - return pydantic.create_model(model_name, __config__=config, **field_definitions) # type: ignore[call-overload] + return pydantic.create_model(model_name, **field_definitions) # type: ignore[call-overload] _RETURN_MODEL_ATTR = "value" diff --git a/src/sentry/types/region.py b/src/sentry/types/region.py index ac0cabc1241ef..50a8614d1875a 100644 --- a/src/sentry/types/region.py +++ b/src/sentry/types/region.py @@ -8,8 +8,8 @@ import sentry_sdk from django.conf import settings from django.http import HttpRequest -from pydantic import TypeAdapter from pydantic.dataclasses import dataclass +from pydantic.tools import parse_obj_as from sentry import options from sentry.silo.base import SiloMode, SingleProcessSiloModeState, control_silo_function @@ -151,8 +151,7 @@ def validate_all(self) -> None: def _parse_raw_config(region_config: Any) -> Iterable[Region]: if isinstance(region_config, (str, bytes)): json_config_values = json.loads(region_config) - adapter = TypeAdapter(list[Region]) - config_values = adapter.validate_python(json_config_values) + config_values = parse_obj_as(list[Region], json_config_values) else: config_values = region_config diff --git a/src/sentry/users/services/user/serial.py b/src/sentry/users/services/user/serial.py index eef96de5b4824..30987ad398cdc 100644 --- a/src/sentry/users/services/user/serial.py +++ b/src/sentry/users/services/user/serial.py @@ -36,7 +36,7 @@ def serialize_generic_user(user: Any) -> RpcUser | None: def _serialize_from_user_fields(user: User) -> dict[str, Any]: args = { field_name: getattr(user, field_name) - for field_name in RpcUserProfile.model_fields + for field_name in RpcUserProfile.__fields__ if hasattr(user, field_name) } args["pk"] = user.pk diff --git a/tests/sentry/api/endpoints/test_organization_sdk_updates.py b/tests/sentry/api/endpoints/test_organization_sdk_updates.py index 1d64fda6b5af4..5625a2213546d 100644 
--- a/tests/sentry/api/endpoints/test_organization_sdk_updates.py +++ b/tests/sentry/api/endpoints/test_organization_sdk_updates.py @@ -2,7 +2,6 @@ import pytest from django.urls import reverse -from pydantic import PydanticDeprecatedSince20 from sentry.sdk_updates import SdkIndexState from sentry.testutils.cases import APITestCase, SnubaTestCase @@ -189,14 +188,8 @@ def test_unknown_version(self, mock_index_state): update_suggestions = response.data assert len(update_suggestions) == 0 - # TODO(Gabe): Temporary kludge to allow this to pass while pydantic - # deprecation warnings are active. - filtered_warnings = [ - info for info in warninfo if not isinstance(info.message, PydanticDeprecatedSince20) - ] - # until it is turned into an error, we'll get a warning about parsing an invalid version - (warning,) = filtered_warnings + (warning,) = warninfo assert isinstance(warning.message, DeprecationWarning) (warn_msg,) = warning.message.args assert ( diff --git a/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py b/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py index 81884462f4471..2f690b4d40305 100644 --- a/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py +++ b/tests/sentry/api/endpoints/test_organization_sentry_app_installation_details.py @@ -1,4 +1,3 @@ -import unittest from unittest.mock import patch import responses @@ -112,16 +111,7 @@ def test_delete_install(self, record, run): assert AuditLogEntry.objects.filter( event=audit_log.get_event_id("SENTRY_APP_UNINSTALL") ).exists() - - # user is wrapped in a SimpleLazyObject, meaning we can't assert equality - # via the method parameter check. Manually retrieve and check it instead. 
- run.assert_called_once_with( - install=self.orm_installation2, user=unittest.mock.ANY, action="deleted" - ) - - mock_call_user = run.mock_calls[0].kwargs.get("user") - assert mock_call_user == rpc_user - + run.assert_called_once_with(install=self.orm_installation2, user=rpc_user, action="deleted") record.assert_called_with( "sentry_app.uninstalled", user_id=self.user.id, diff --git a/tests/sentry/test_dependencies.py b/tests/sentry/test_dependencies.py new file mode 100644 index 0000000000000..eebd8e5fe696c --- /dev/null +++ b/tests/sentry/test_dependencies.py @@ -0,0 +1,8 @@ +import pydantic + + +def test_pydantic_1x_compiled() -> None: + if not pydantic.VERSION.startswith("1."): + raise AssertionError("delete this test, it only applies to pydantic 1.x") + # pydantic is horribly slow when not cythonized + assert pydantic.__file__.endswith(".so") From 3860f22d87c5f544fad40d8c2419b3552be2abfe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vjeran=20Grozdani=C4=87?= Date: Thu, 1 Aug 2024 09:04:36 +0200 Subject: [PATCH 49/52] ref(metrics-extraction) Improve typing of the build_safe_config (#75341) Improves typing of the `build_safe_config` function, and removes `# type ignore` comments. 
--- src/sentry/relay/config/experimental.py | 18 ++++++++---------- src/sentry/relay/config/metric_extraction.py | 10 +++++----- tests/sentry/relay/config/test_experimental.py | 12 ++++++------ 3 files changed, 19 insertions(+), 21 deletions(-) diff --git a/src/sentry/relay/config/experimental.py b/src/sentry/relay/config/experimental.py index d1e1cb227ecee..a9473d4f6e264 100644 --- a/src/sentry/relay/config/experimental.py +++ b/src/sentry/relay/config/experimental.py @@ -1,7 +1,7 @@ import logging from collections.abc import Callable, MutableMapping from datetime import datetime, timedelta, timezone -from typing import Any, Protocol, TypeVar +from typing import Any, Concatenate, ParamSpec, Protocol, TypeVar import sentry_sdk @@ -66,20 +66,18 @@ def add_experimental_config( R = TypeVar("R") -R_default = TypeVar("R_default") +P = ParamSpec("P") def build_safe_config( key: str, - function: Callable[..., R], - *args: Any, - default_return: R_default | None = None, - **kwargs: Any, -) -> R | R_default | None: + function: Callable[Concatenate[TimeChecker, P], R], + *args: P.args, + **kwargs: P.kwargs, +) -> R | None: """ Runs a config builder function with a timeout. - If the function call raises an exception, we log it to sentry and return value passed as - `default_return` parameter (by default this is `None`). 
+ If the function call raises an exception, we log it to sentry and return None """ timeout = TimeChecker(_FEATURE_BUILD_TIMEOUT) @@ -95,4 +93,4 @@ def build_safe_config( except Exception: logger.exception("Exception while building Relay project config field") - return default_return + return None diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index d18cecc770f65..1980327348cae 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -112,14 +112,14 @@ def get_metric_extraction_config(project: Project) -> MetricExtractionConfig | N with sentry_sdk.start_span(op="get_on_demand_metric_specs"): alert_specs, widget_specs = build_safe_config( - "on_demand_metric_specs", get_on_demand_metric_specs, project, default_return=([], []) - ) # type: ignore[misc] + "on_demand_metric_specs", get_on_demand_metric_specs, project + ) or ([], []) with sentry_sdk.start_span(op="generate_span_attribute_specs"): - span_attr_specs = build_safe_config( - "span_attribute_specs", _generate_span_attribute_specs, project, default_return=[] + span_attr_specs = ( + build_safe_config("span_attribute_specs", _generate_span_attribute_specs, project) or [] ) with sentry_sdk.start_span(op="merge_metric_specs"): - metric_specs = _merge_metric_specs(alert_specs, widget_specs, span_attr_specs) # type: ignore[arg-type] + metric_specs = _merge_metric_specs(alert_specs, widget_specs, span_attr_specs) with sentry_sdk.start_span(op="get_extrapolation_config"): extrapolation_config = get_extrapolation_config(project) diff --git a/tests/sentry/relay/config/test_experimental.py b/tests/sentry/relay/config/test_experimental.py index 82cf78f57b192..e911397bf218d 100644 --- a/tests/sentry/relay/config/test_experimental.py +++ b/tests/sentry/relay/config/test_experimental.py @@ -82,20 +82,20 @@ def dummy2(*args, **kwargs): @patch("sentry.relay.config.experimental._FEATURE_BUILD_TIMEOUT", 
timedelta(seconds=1)) @patch("sentry.relay.config.experimental.logger.exception") -def test_build_safe_config_returns_default_value_on_timeout_exception(mock_logger): +def test_build_safe_config_returns_none_on_timeout_exception(mock_logger): def dummy(timeout: TimeChecker, *args, **kwargs): sleep(1) timeout.check() - result = build_safe_config("key", dummy, default_return="bar") + result = build_safe_config("key", dummy) - assert result == "bar" + assert result is None -def test_build_safe_config_returns_value_passed_as_arg_on_exception(): +def test_build_safe_config_returns_none_on_non_timeout_exception(): def dummy(*args, **kwargs): raise ValueError("foo") - result = build_safe_config("key", dummy, default_return="bar") + result = build_safe_config("key", dummy) - assert result == "bar" + assert result is None From 4846445060be4745b7c3f9aecdb1f5f427633c22 Mon Sep 17 00:00:00 2001 From: Joris Bayer Date: Thu, 1 Aug 2024 09:05:30 +0200 Subject: [PATCH 50/52] test(relay): Instrument flaky test (#75361) `test_histogram_outliers` became flaky after https://github.com/getsentry/relay/pull/3863. Add info to the assert to get more insights. 
--- tests/relay_integration/test_metrics_extraction.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/relay_integration/test_metrics_extraction.py b/tests/relay_integration/test_metrics_extraction.py index d865f4e65d0a8..f173c9a43de73 100644 --- a/tests/relay_integration/test_metrics_extraction.py +++ b/tests/relay_integration/test_metrics_extraction.py @@ -1,3 +1,4 @@ +import time import uuid import confluent_kafka as kafka @@ -148,11 +149,14 @@ def test_histogram_outliers(self): self.post_and_retrieve_event(event_data) histogram_outlier_tags = {} - for _ in range(1000): + buckets = [] + t0 = time.monotonic() + for attempt in range(1000): message = consumer.poll(timeout=1.0) if message is None: break bucket = json.loads(message.value()) + buckets.append(bucket) try: histogram_outlier_tags[bucket["name"]] = bucket["tags"]["histogram_outlier"] except KeyError: @@ -163,4 +167,8 @@ def test_histogram_outliers(self): "d:transactions/duration@millisecond": "inlier", "d:transactions/measurements.fcp@millisecond": "outlier", "d:transactions/measurements.lcp@millisecond": "inlier", + }, { + "attempts": attempt, + "time_elapsed": time.monotonic() - t0, + "bucket_count": len(buckets), } From d2a97378c74aa7bd85a16cfe513673c9d52b3c41 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Thu, 1 Aug 2024 09:59:27 +0200 Subject: [PATCH 51/52] feat(metric): Add menu footer to the visualize field (#75339) --- static/app/components/comboBox/index.tsx | 22 ++++++++ .../metrics/mriSelect/index.spec.tsx | 50 ++++++++++++++++++- .../components/metrics/mriSelect/index.tsx | 41 +++++++++++++-- 3 files changed, 109 insertions(+), 4 deletions(-) diff --git a/static/app/components/comboBox/index.tsx b/static/app/components/comboBox/index.tsx index b7d41f44fa5c0..3bf0e0bde3e17 100644 --- a/static/app/components/comboBox/index.tsx +++ b/static/app/components/comboBox/index.tsx @@ -45,6 +45,13 @@ interface ComboBoxProps hiddenOptions?: Set; isLoading?: 
boolean; loadingMessage?: string; + /** + * Footer to be rendered at the bottom of the menu. + * @closeOverlay is a function that closes the menu + */ + menuFooter?: + | React.ReactNode + | ((actions: {closeOverlay: () => void}) => React.ReactNode); menuSize?: FormSize; menuWidth?: string; size?: FormSize; @@ -67,6 +74,7 @@ function ComboBox({ menuWidth, hiddenOptions, hasSearch, + menuFooter, ...props }: ComboBoxProps) { const theme = useTheme(); @@ -201,6 +209,13 @@ function ComboBox({ /> No items found

+ {menuFooter && ( + + {typeof menuFooter === 'function' + ? menuFooter({closeOverlay: state.close}) + : menuFooter} + + )} @@ -464,4 +479,11 @@ const StyledLoadingIndicator = styled(LoadingIndicator)` width: 12px; } `; + +const MenuFooter = styled('div')` + box-shadow: 0 -1px 0 ${p => p.theme.translucentInnerBorder}; + padding: ${space(1)} ${space(1.5)}; + z-index: 2; +`; + export {ControlledComboBox as ComboBox}; diff --git a/static/app/components/metrics/mriSelect/index.spec.tsx b/static/app/components/metrics/mriSelect/index.spec.tsx index 09b5e850de5b5..b041b4f3b3de1 100644 --- a/static/app/components/metrics/mriSelect/index.spec.tsx +++ b/static/app/components/metrics/mriSelect/index.spec.tsx @@ -1,6 +1,14 @@ +import {initializeOrg} from 'sentry-test/initializeOrg'; +import { + render, + renderGlobalModal, + screen, + userEvent, +} from 'sentry-test/reactTestingLibrary'; + import type {MetricMeta, UseCase} from 'sentry/types/metrics'; -import {getMetricsWithDuplicateNames} from '.'; +import {getMetricsWithDuplicateNames, MRISelect} from '.'; function createMetricMeta( name: string, @@ -72,4 +80,44 @@ describe('getMetricsWithDuplicateNames', () => { const result = getMetricsWithDuplicateNames(metrics); expect(result).toEqual(new Set([])); }); + + it('by clicking on the "create metric" button the metric modal shall be opened', async function () { + const {project, organization} = initializeOrg({ + organization: {features: ['metrics-new-inputs']}, + }); + + render( + , + { + organization, + } + ); + + renderGlobalModal(); + + await userEvent.click(screen.getByLabelText('Metric')); + await userEvent.click(screen.getByRole('button', {name: 'Create Metric'})); + expect(screen.getByText(/Don’t see your span attribute/)).toBeInTheDocument(); + + expect( + await screen.findByRole('heading', {name: 'Create Metric'}) + ).toBeInTheDocument(); + }); }); diff --git a/static/app/components/metrics/mriSelect/index.tsx b/static/app/components/metrics/mriSelect/index.tsx index 
25e1d5e1d4e8d..436cdd02e7e01 100644 --- a/static/app/components/metrics/mriSelect/index.tsx +++ b/static/app/components/metrics/mriSelect/index.tsx @@ -1,13 +1,15 @@ import {memo, useCallback, useEffect, useMemo, useState} from 'react'; -import {css} from '@emotion/react'; +import {css, useTheme} from '@emotion/react'; import styled from '@emotion/styled'; +import {Button} from 'sentry/components/button'; import {ComboBox} from 'sentry/components/comboBox'; import type {ComboBoxOption} from 'sentry/components/comboBox/types'; import ProjectBadge from 'sentry/components/idBadge/projectBadge'; import {QueryFieldGroup} from 'sentry/components/metrics/queryFieldGroup'; -import {IconProject, IconWarning} from 'sentry/icons'; +import {IconAdd, IconInfo, IconProject, IconWarning} from 'sentry/icons'; import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; import type {MetricMeta, MRI} from 'sentry/types/metrics'; import {type Fuse, useFuzzySearch} from 'sentry/utils/fuzzySearch'; import { @@ -28,6 +30,7 @@ import {middleEllipsis} from 'sentry/utils/string/middleEllipsis'; import useKeyPress from 'sentry/utils/useKeyPress'; import useOrganization from 'sentry/utils/useOrganization'; import useProjects from 'sentry/utils/useProjects'; +import {openExtractionRuleCreateModal} from 'sentry/views/settings/projectMetrics/metricsExtractionRuleCreateModal'; import {MetricListItemDetails} from './metricListItemDetails'; @@ -155,6 +158,7 @@ export const MRISelect = memo(function MRISelect({ isLoading, value, }: MRISelectProps) { + const theme = useTheme(); const organization = useOrganization(); const {projects} = useProjects(); const mriMode = useMriMode(); @@ -315,7 +319,7 @@ export const MRISelect = memo(function MRISelect({ isLoading={isLoading} loadingMessage={t('Loading\u2026')} menuSize="sm" - menuWidth="450px" + menuWidth="450px" // TODO(priscilawebdev): update this value for small screens onChange={handleMRIChange} onInputChange={setInputValue} 
onOpenChange={onOpenMenu} @@ -324,6 +328,31 @@ export const MRISelect = memo(function MRISelect({ size="md" sizeLimit={100} value={value} + menuFooter={({closeOverlay}) => ( + + + + + {t('Don’t see your span attribute? Create Metric.')} + + + )} /> ); } @@ -356,3 +385,9 @@ export const MRISelect = memo(function MRISelect({ const CustomMetricInfoText = styled('span')` color: ${p => p.theme.subText}; `; + +const FlexBlock = styled('div')` + display: flex; + justify-content: space-between; + align-items: center; +`; From 6078b8e416b3415d2632a4d02d8927679b835712 Mon Sep 17 00:00:00 2001 From: Sigrid Huemer <32902192+s1gr1d@users.noreply.github.com> Date: Thu, 1 Aug 2024 11:28:15 +0200 Subject: [PATCH 52/52] ref(onboarding): Change wording to "Tracing" (#75331) Changes the wording of "Performance Monitoring" to "Tracing". Also had to change `getsentry` Tests for this: https://github.com/getsentry/getsentry/pull/14765 Instead of renaming every occurrence in the codebase, only the occurrences in "Onboarding" are changed. 
Previous, unmerged PRs: - Frontend https://github.com/getsentry/sentry/pull/75223 - Backend: https://github.com/getsentry/sentry/pull/75224 requires https://github.com/getsentry/getsentry/pull/14765 Co-authored-by: Priscila Oliveira --- .../onboarding/productSelection.spec.tsx | 28 +++++++++---------- .../onboarding/productSelection.tsx | 11 +++----- 2 files changed, 17 insertions(+), 22 deletions(-) diff --git a/static/app/components/onboarding/productSelection.spec.tsx b/static/app/components/onboarding/productSelection.spec.tsx index fbcab1e4b5756..97fb76c6d11ff 100644 --- a/static/app/components/onboarding/productSelection.spec.tsx +++ b/static/app/components/onboarding/productSelection.spec.tsx @@ -60,18 +60,18 @@ describe('Onboarding Product Selection', function () { await userEvent.click(screen.getByRole('checkbox', {name: 'Error Monitoring'})); await waitFor(() => expect(router.push).not.toHaveBeenCalled()); - // Performance monitoring shall be checked and enabled by default - expect(screen.getByRole('checkbox', {name: 'Performance Monitoring'})).toBeChecked(); - expect(screen.getByRole('checkbox', {name: 'Performance Monitoring'})).toBeEnabled(); + // Tracing shall be checked and enabled by default + expect(screen.getByRole('checkbox', {name: 'Tracing'})).toBeChecked(); + expect(screen.getByRole('checkbox', {name: 'Tracing'})).toBeEnabled(); // Tooltip with explanation shall be displayed on hover - await userEvent.hover(screen.getByRole('checkbox', {name: 'Performance Monitoring'})); + await userEvent.hover(screen.getByRole('checkbox', {name: 'Tracing'})); expect( await screen.findByText(/Automatic performance issue detection/) ).toBeInTheDocument(); - // Uncheck performance monitoring - await userEvent.click(screen.getByRole('checkbox', {name: 'Performance Monitoring'})); + // Uncheck tracing + await userEvent.click(screen.getByRole('checkbox', {name: 'Tracing'})); await waitFor(() => expect(router.replace).toHaveBeenCalledWith({ pathname: undefined, @@ 
-172,12 +172,10 @@ describe('Onboarding Product Selection', function () { } ); - // Performance Monitoring shall be unchecked and disabled by default - expect(screen.getByRole('checkbox', {name: 'Performance Monitoring'})).toBeDisabled(); - expect( - screen.getByRole('checkbox', {name: 'Performance Monitoring'}) - ).not.toBeChecked(); - await userEvent.hover(screen.getByRole('checkbox', {name: 'Performance Monitoring'})); + // Tracing shall be unchecked and disabled by default + expect(screen.getByRole('checkbox', {name: 'Tracing'})).toBeDisabled(); + expect(screen.getByRole('checkbox', {name: 'Tracing'})).not.toBeChecked(); + await userEvent.hover(screen.getByRole('checkbox', {name: 'Tracing'})); // A tooltip with explanation why the option is disabled shall be displayed on hover expect( @@ -185,9 +183,9 @@ describe('Onboarding Product Selection', function () { disabledProducts[ProductSolution.PERFORMANCE_MONITORING].reason ) ).toBeInTheDocument(); - await userEvent.click(screen.getByRole('checkbox', {name: 'Performance Monitoring'})); + await userEvent.click(screen.getByRole('checkbox', {name: 'Tracing'})); - // Try to uncheck performance monitoring + // Try to uncheck tracing await waitFor(() => expect(router.push).not.toHaveBeenCalled()); }); @@ -272,7 +270,7 @@ describe('Onboarding Product Selection', function () { expect(screen.getByRole('checkbox', {name: 'Error Monitoring'})).toBeEnabled(); - expect(screen.getByRole('checkbox', {name: 'Performance Monitoring'})).toBeDisabled(); + expect(screen.getByRole('checkbox', {name: 'Tracing'})).toBeDisabled(); expect(screen.getByRole('checkbox', {name: 'Session Replay'})).toBeDisabled(); }); diff --git a/static/app/components/onboarding/productSelection.tsx b/static/app/components/onboarding/productSelection.tsx index ebd1201d277ec..529e5a218cce1 100644 --- a/static/app/components/onboarding/productSelection.tsx +++ b/static/app/components/onboarding/productSelection.tsx @@ -69,10 +69,7 @@ function 
getDisabledProducts(organization: Organization): DisabledProducts { if (!hasPerformance) { disabledProducts[ProductSolution.PERFORMANCE_MONITORING] = { reason, - onClick: createClickHandler( - 'organizations:performance-view', - 'Performance Monitoring' - ), + onClick: createClickHandler('organizations:performance-view', 'Tracing'), }; } if (!hasProfiling) { @@ -358,7 +355,7 @@ export function ProductSelection({ ); if (defaultProducts?.includes(ProductSolution.PROFILING)) { - // Ensure that if profiling is enabled, performance monitoring is also enabled + // Ensure that if profiling is enabled, tracing is also enabled if ( product === ProductSolution.PROFILING && newProduct.has(ProductSolution.PROFILING) @@ -434,7 +431,7 @@ export function ProductSelection({ /> {products.includes(ProductSolution.PERFORMANCE_MONITORING) && ( , }