diff --git a/src/modules/core/sentry/universal.ts b/src/modules/core/sentry/universal.ts index 8b9dc832..b4c4d45f 100644 --- a/src/modules/core/sentry/universal.ts +++ b/src/modules/core/sentry/universal.ts @@ -1,6 +1,13 @@ +import { createLogger } from '@/modules/core/logging/logger'; +import { FLUSH_TIMEOUT } from '@/modules/core/sentry/config'; import { UserSession } from '@/modules/core/userSession/useUserSession'; import * as Sentry from '@sentry/node'; +const fileLabel = 'modules/core/sentry/universal'; +const logger = createLogger({ + fileLabel, +}); + /** * Configure Sentry tags related to the current user. * @@ -35,3 +42,22 @@ export const configureSentryI18n = (lang: string, locale: string): void => { }); } }; + +/** + * Flushes Sentry queue in a safe way. + * + * It's necessary to flush all Sentry events on the server, because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses + * If you don't flush, then it's possible the Sentry events won't be sent. + * This helper is meant to be used for backend-only usage. (not frontend) + * + * There is a potential bug in Sentry that throws an exception when flushing times out, causing API endpoints to fail. 
+ * @see https://github.com/getsentry/sentry/issues/26870 + */ +export const flushSafe = async (): Promise<boolean> => { + try { + return await Sentry.flush(FLUSH_TIMEOUT); + } catch (e) { + logger.error(`[flushSafe] An exception was thrown while running Sentry.flush()`, e); + return false; + } +}; diff --git a/src/pages/_error.tsx b/src/pages/_error.tsx index 699dd384..960ba810 100644 --- a/src/pages/_error.tsx +++ b/src/pages/_error.tsx @@ -1,4 +1,4 @@ -import { FLUSH_TIMEOUT } from '@/modules/core/sentry/config'; +import { flushSafe } from '@/modules/core/sentry/universal'; import * as Sentry from '@sentry/node'; import { NextPageContext } from 'next'; import NextError, { ErrorProps as NextErrorProps } from 'next/error'; @@ -126,8 +126,7 @@ ErrorPage.getInitialProps = async (props: NextPageContext): Promise if (err) { Sentry.captureException(err); - // It's necessary to flush all events when running on the server, because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); return errorInitialProps; } @@ -155,8 +154,7 @@ ErrorPage.getInitialProps = async (props: NextPageContext): Promise new Error(`_error.js getInitialProps missing data at path: ${asPath}`), ); - // It's necessary to flush all events when running on the server, because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); return errorInitialProps; };
+import { flushSafe } from '@/modules/core/sentry/universal'; import { NextApiRequest, NextApiResponse, @@ -40,8 +40,7 @@ export const error = async (req: NextApiRequest, res: NextApiResponse): Promise< Sentry.captureException(e); logger.error(e.message); - // It's necessary to flush all events because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); res.json({ error: true, diff --git a/src/pages/api/preview.ts b/src/pages/api/preview.ts index da506394..7b91aa20 100644 --- a/src/pages/api/preview.ts +++ b/src/pages/api/preview.ts @@ -5,9 +5,9 @@ import { } from '@/modules/core/amplitude/events'; import { filterExternalAbsoluteUrl } from '@/modules/core/js/url'; import { createLogger } from '@/modules/core/logging/logger'; -import { FLUSH_TIMEOUT } from '@/modules/core/sentry/config'; import Sentry from '@/modules/core/sentry/init'; import { configureReq } from '@/modules/core/sentry/server'; +import { flushSafe } from '@/modules/core/sentry/universal'; import appendQueryParameter from 'append-query'; import { NextApiRequest, @@ -103,8 +103,7 @@ export const preview = async (req: EndpointRequest, res: NextApiResponse): Promi Sentry.captureMessage('Preview mode is not allowed in production', Sentry.Severity.Warning); } - // It's necessary to flush all events because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); res.writeHead(307, { Location: safeRedirectUrl }); res.end(); @@ -112,8 +111,7 @@ export const preview = async (req: EndpointRequest, res: NextApiResponse): Promi Sentry.captureException(e); logger.error(e.message); - // It's necessary to flush all events because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); res.json({ error: true, diff --git 
a/src/pages/api/status.ts b/src/pages/api/status.ts index e03126f3..89f7c7de 100644 --- a/src/pages/api/status.ts +++ b/src/pages/api/status.ts @@ -4,9 +4,9 @@ import { AMPLITUDE_EVENTS, } from '@/modules/core/amplitude/events'; import { createLogger } from '@/modules/core/logging/logger'; -import { FLUSH_TIMEOUT } from '@/modules/core/sentry/config'; import Sentry from '@/modules/core/sentry/init'; import { configureReq } from '@/modules/core/sentry/server'; +import { flushSafe } from '@/modules/core/sentry/universal'; import { NextApiRequest, NextApiResponse, @@ -63,8 +63,7 @@ export const status = async (req: NextApiRequest, res: NextApiResponse): Promise Sentry.captureException(e); logger.error(e.message); - // It's necessary to flush all events because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); res.json({ error: true, diff --git a/src/pages/api/webhooks/deploymentCompleted.ts b/src/pages/api/webhooks/deploymentCompleted.ts index f5891cd0..17122d0f 100644 --- a/src/pages/api/webhooks/deploymentCompleted.ts +++ b/src/pages/api/webhooks/deploymentCompleted.ts @@ -5,12 +5,10 @@ import { } from '@/modules/core/amplitude/events'; import { convertRequestBodyToJSObject } from '@/modules/core/api/convertRequestBodyToJSObject'; import { createLogger } from '@/modules/core/logging/logger'; -import { - ALERT_TYPES, - FLUSH_TIMEOUT, -} from '@/modules/core/sentry/config'; +import { ALERT_TYPES } from '@/modules/core/sentry/config'; import Sentry from '@/modules/core/sentry/init'; import { configureReq } from '@/modules/core/sentry/server'; +import { flushSafe } from '@/modules/core/sentry/universal'; import { NextApiRequest, NextApiResponse, @@ -120,8 +118,7 @@ export const deploymentCompleted = async (req: EndpointRequest, res: NextApiResp }); }); - // It's necessary to flush all events because Vercel runs on AWS Lambda, see 
https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); res.status(200); res.end(); @@ -129,8 +126,7 @@ export const deploymentCompleted = async (req: EndpointRequest, res: NextApiResp Sentry.captureException(e); logger.error(e.message); - // It's necessary to flush all events because Vercel runs on AWS Lambda, see https://vercel.com/docs/platform/limits#streaming-responses - await Sentry.flush(FLUSH_TIMEOUT); + await flushSafe(); res.status(500); res.end();