feat: Update Sentry telemetry to v8 #604

Open · wants to merge 5 commits into base: main
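The core of this change replaces v7's Hub/transaction telemetry API with v8's callback-based `startSpan`. Below is a condensed, hypothetical sketch of that pattern rather than code taken from the diff; the v7 calls are shown only as comments for contrast and the inner work is elided.

```ts
import { startSpan, Scope } from "@sentry/core";

// v7 (removed in this diff): manual transaction/child-span lifecycle on a Hub.
//   const transaction = sentryHub.startTransaction({ name: "debug-id-sourcemap-upload" });
//   const globSpan = transaction.startChild({ description: "glob" });
//   /* ...work... */
//   globSpan.finish();
//   transaction.finish();

// v8 (added in this diff): spans are opened with startSpan() against an explicit Scope
// and end automatically when the async callback settles.
export async function runUploadSpans(sentryScope: Scope): Promise<void> {
  await startSpan({ name: "debug-id-sourcemap-upload", scope: sentryScope }, async () => {
    await startSpan({ name: "glob", scope: sentryScope }, async () => {
      // ...glob build artifacts and filter for debug ID chunks...
    });
  });
}
```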
5 changes: 3 additions & 2 deletions packages/bundler-plugin-core/package.json
@@ -70,8 +70,9 @@
"@rollup/plugin-replace": "^4.0.0",
"@sentry-internal/eslint-config": "2.22.4",
"@sentry-internal/sentry-bundler-plugin-tsconfig": "2.22.4",
"@sentry/node": "7.102.0",
"@sentry/utils": "7.102.0",
"@sentry/core": "8.30.0",
"@sentry/types": "8.30.0",
"@sentry/utils": "8.30.0",
"@swc/core": "^1.2.205",
"@swc/jest": "^0.2.21",
"@types/jest": "^28.1.3",
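With `@sentry/node` v7 gone, the plugin's telemetry options switch from `sentryHub: Hub` / `sentryClient: NodeClient` to `sentryScope: Scope` / `sentryClient: Client` (see the `debug-id-upload.ts` changes below). A minimal sketch of how a caller might build that pair under v8; `createTelemetryClient` is a hypothetical placeholder for however the plugin constructs its client, and `Scope#setClient` is assumed to be the v8 way of binding a client to a scope.

```ts
import { Scope } from "@sentry/core";
import type { Client } from "@sentry/types";

// Hypothetical placeholder for the plugin's own telemetry client construction.
declare function createTelemetryClient(): Client;

export function createTelemetryPair(): { sentryScope: Scope; sentryClient: Client } {
  const sentryClient = createTelemetryClient();
  const sentryScope = new Scope();
  // v8 has no Hub: the client is attached directly to a scope.
  sentryScope.setClient(sentryClient);
  return { sentryScope, sentryClient };
}
```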
274 changes: 134 additions & 140 deletions packages/bundler-plugin-core/src/debug-id-upload.ts
@@ -5,13 +5,16 @@ import path from "path";
import * as util from "util";
import { Logger } from "./sentry/logger";
import { promisify } from "util";
import { Hub, NodeClient } from "@sentry/node";
import SentryCli from "@sentry/cli";
import { dynamicSamplingContextToSentryBaggageHeader } from "@sentry/utils";
import { safeFlushTelemetry } from "./sentry/telemetry";
import { stripQueryAndHashFromPath } from "./utils";
import { setMeasurement, spanToTraceHeader, startSpan } from "@sentry/core";
import { getDynamicSamplingContextFromSpan, Scope } from "@sentry/core";
import { Client } from "@sentry/types";

interface RewriteSourcesHook {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(source: string, map: any): string;
}

@@ -23,8 +26,8 @@ interface DebugIdUploadPluginOptions {
dist?: string;
rewriteSourcesHook?: RewriteSourcesHook;
handleRecoverableError: (error: unknown) => void;
sentryHub: Hub;
sentryClient: NodeClient;
sentryScope: Scope;
sentryClient: Client;
sentryCliOptions: {
url: string;
authToken: string;
@@ -44,7 +47,7 @@ export function createDebugIdUploadFunction({
releaseName,
dist,
handleRecoverableError,
sentryHub,
sentryScope,
sentryClient,
sentryCliOptions,
rewriteSourcesHook,
@@ -53,155 +56,146 @@
const freeGlobalDependencyOnSourcemapFiles = createDependencyOnSourcemapFiles();

return async (buildArtifactPaths: string[]) => {
const artifactBundleUploadTransaction = sentryHub.startTransaction({
name: "debug-id-sourcemap-upload",
});

let folderToCleanUp: string | undefined;

// It is possible that this writeBundle hook (which calls this function) is called multiple times in one build (for example when reusing the plugin, or when using build tooling like `@vitejs/plugin-legacy`)
// Therefore we need to actually register the execution of this hook as dependency on the sourcemap files.
const freeUploadDependencyOnSourcemapFiles = createDependencyOnSourcemapFiles();
await startSpan({ name: "debug-id-sourcemap-upload", scope: sentryScope }, async () => {
let folderToCleanUp: string | undefined;

try {
const mkdtempSpan = artifactBundleUploadTransaction.startChild({ description: "mkdtemp" });
const tmpUploadFolder = await fs.promises.mkdtemp(
path.join(os.tmpdir(), "sentry-bundler-plugin-upload-")
);
mkdtempSpan.finish();

folderToCleanUp = tmpUploadFolder;

let globAssets;
if (assets) {
globAssets = assets;
} else {
logger.debug(
"No `sourcemaps.assets` option provided, falling back to uploading detected build artifacts."
);
globAssets = buildArtifactPaths;
}
// It is possible that this writeBundle hook (which calls this function) is called multiple times in one build (for example when reusing the plugin, or when using build tooling like `@vitejs/plugin-legacy`)
// Therefore we need to actually register the execution of this hook as dependency on the sourcemap files.
const freeUploadDependencyOnSourcemapFiles = createDependencyOnSourcemapFiles();

const globSpan = artifactBundleUploadTransaction.startChild({ description: "glob" });
const globResult = await glob(globAssets, {
absolute: true,
nodir: true,
ignore: ignore,
});
globSpan.finish();

const debugIdChunkFilePaths = globResult.filter((debugIdChunkFilePath) => {
return !!stripQueryAndHashFromPath(debugIdChunkFilePath).match(/\.(js|mjs|cjs)$/);
});

// The order of the files output by glob() is not deterministic
// Ensure order within the files so that {debug-id}-{chunkIndex} coupling is consistent
debugIdChunkFilePaths.sort();

if (Array.isArray(assets) && assets.length === 0) {
logger.debug(
"Empty `sourcemaps.assets` option provided. Will not upload sourcemaps with debug ID."
);
} else if (debugIdChunkFilePaths.length === 0) {
logger.warn(
"Didn't find any matching sources for debug ID upload. Please check the `sourcemaps.assets` option."
);
} else {
const prepareSpan = artifactBundleUploadTransaction.startChild({
description: "prepare-bundles",
});

// Preparing the bundles can be a lot of work and doing it all at once has the potential of nuking the heap so
// instead we do it with a maximum of 16 concurrent workers
const preparationTasks = debugIdChunkFilePaths.map(
(chunkFilePath, chunkIndex) => async () => {
await prepareBundleForDebugIdUpload(
chunkFilePath,
tmpUploadFolder,
chunkIndex,
logger,
rewriteSourcesHook ?? defaultRewriteSourcesHook
try {
const tmpUploadFolder = await startSpan(
{ name: "mkdtemp", scope: sentryScope },
async () => {
return await fs.promises.mkdtemp(
path.join(os.tmpdir(), "sentry-bundler-plugin-upload-")
);
}
);
const workers: Promise<void>[] = [];
const worker = async () => {
while (preparationTasks.length > 0) {
const task = preparationTasks.shift();
if (task) {
await task();
}
}
};
for (let workerIndex = 0; workerIndex < 16; workerIndex++) {
workers.push(worker());
}
await Promise.all(workers);

prepareSpan.finish();
folderToCleanUp = tmpUploadFolder;

const files = await fs.promises.readdir(tmpUploadFolder);
const stats = files.map((file) => fs.promises.stat(path.join(tmpUploadFolder, file)));
const uploadSize = (await Promise.all(stats)).reduce(
(accumulator, { size }) => accumulator + size,
0
);

artifactBundleUploadTransaction.setMeasurement("files", files.length, "none");
artifactBundleUploadTransaction.setMeasurement("upload_size", uploadSize, "byte");
let globAssets: string | string[];
if (assets) {
globAssets = assets;
} else {
logger.debug(
"No `sourcemaps.assets` option provided, falling back to uploading detected build artifacts."
);
globAssets = buildArtifactPaths;
}

const uploadSpan = artifactBundleUploadTransaction.startChild({
description: "upload",
});
const globResult = await startSpan(
{ name: "glob", scope: sentryScope },
async () => await glob(globAssets, { absolute: true, nodir: true, ignore: ignore })
);

const cliInstance = new SentryCli(null, {
...sentryCliOptions,
headers: {
"sentry-trace": uploadSpan.toTraceparent(),
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
baggage: dynamicSamplingContextToSentryBaggageHeader(
artifactBundleUploadTransaction.getDynamicSamplingContext()
)!,
...sentryCliOptions.headers,
},
const debugIdChunkFilePaths = globResult.filter((debugIdChunkFilePath) => {
return !!stripQueryAndHashFromPath(debugIdChunkFilePath).match(/\.(js|mjs|cjs)$/);
});

await cliInstance.releases.uploadSourceMaps(
releaseName ?? "undefined", // unfortunetly this needs a value for now but it will not matter since debug IDs overpower releases anyhow
{
include: [
{
paths: [tmpUploadFolder],
rewrite: false,
dist: dist,
},
],
useArtifactBundle: true,
}
);
// The order of the files output by glob() is not deterministic
// Ensure order within the files so that {debug-id}-{chunkIndex} coupling is consistent
debugIdChunkFilePaths.sort();

if (Array.isArray(assets) && assets.length === 0) {
logger.debug(
"Empty `sourcemaps.assets` option provided. Will not upload sourcemaps with debug ID."
);
} else if (debugIdChunkFilePaths.length === 0) {
logger.warn(
"Didn't find any matching sources for debug ID upload. Please check the `sourcemaps.assets` option."
);
} else {
await startSpan(
{ name: "prepare-bundles", scope: sentryScope },
async (prepBundlesSpan) => {
// Preparing the bundles can be a lot of work and doing it all at once has the potential of nuking the heap so
// instead we do it with a maximum of 16 concurrent workers
const preparationTasks = debugIdChunkFilePaths.map(
(chunkFilePath, chunkIndex) => async () => {
await prepareBundleForDebugIdUpload(
chunkFilePath,
tmpUploadFolder,
chunkIndex,
logger,
rewriteSourcesHook ?? defaultRewriteSourcesHook
);
}
);
const workers: Promise<void>[] = [];
const worker = async () => {
while (preparationTasks.length > 0) {
const task = preparationTasks.shift();
if (task) {
await task();
}
}
};
for (let workerIndex = 0; workerIndex < 16; workerIndex++) {
workers.push(worker());
}

await Promise.all(workers);

const files = await fs.promises.readdir(tmpUploadFolder);
const stats = files.map((file) => fs.promises.stat(path.join(tmpUploadFolder, file)));
const uploadSize = (await Promise.all(stats)).reduce(
(accumulator, { size }) => accumulator + size,
0
);

setMeasurement("files", files.length, "none", prepBundlesSpan);
setMeasurement("upload_size", uploadSize, "byte", prepBundlesSpan);

await startSpan({ name: "upload", scope: sentryScope }, async (uploadSpan) => {
const cliInstance = new SentryCli(null, {
...sentryCliOptions,
headers: {
"sentry-trace": spanToTraceHeader(uploadSpan),
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
baggage: dynamicSamplingContextToSentryBaggageHeader(
getDynamicSamplingContextFromSpan(uploadSpan)
)!,
...sentryCliOptions.headers,
},
});

await cliInstance.releases.uploadSourceMaps(
releaseName ?? "undefined", // unfortunetly this needs a value for now but it will not matter since debug IDs overpower releases anyhow
{
include: [
{
paths: [tmpUploadFolder],
rewrite: false,
dist: dist,
},
],
useArtifactBundle: true,
}
);
});
}
);

uploadSpan.finish();
logger.info("Successfully uploaded source maps to Sentry");
}
} catch (e) {
sentryHub.withScope((scope) => {
scope.setSpan(artifactBundleUploadTransaction);
sentryHub.captureException('Error in "debugIdUploadPlugin" writeBundle hook');
});
handleRecoverableError(e);
} finally {
if (folderToCleanUp) {
const cleanupSpan = artifactBundleUploadTransaction.startChild({
description: "cleanup",
});
void fs.promises.rm(folderToCleanUp, { recursive: true, force: true });
cleanupSpan.finish();
logger.info("Successfully uploaded source maps to Sentry");
}
} catch (e) {
sentryScope.captureException('Error in "debugIdUploadPlugin" writeBundle hook');
handleRecoverableError(e);
} finally {
if (folderToCleanUp) {
void startSpan({ name: "cleanup", scope: sentryScope }, async () => {
if (folderToCleanUp) {
await fs.promises.rm(folderToCleanUp, { recursive: true, force: true });
}
});
}
freeGlobalDependencyOnSourcemapFiles();
freeUploadDependencyOnSourcemapFiles();
await safeFlushTelemetry(sentryClient);
}
artifactBundleUploadTransaction.finish();
freeGlobalDependencyOnSourcemapFiles();
freeUploadDependencyOnSourcemapFiles();
await safeFlushTelemetry(sentryClient);
}
});
};
}
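Inside the `upload` span, the new code forwards the active span's trace context to Sentry CLI through request headers. Pulled out of the diff and simplified, the pattern looks roughly like the sketch below; `CliOptions`, the release name, and the upload folder are placeholder assumptions, and only helpers already imported by the diff are used.

```ts
import SentryCli from "@sentry/cli";
import {
  startSpan,
  spanToTraceHeader,
  getDynamicSamplingContextFromSpan,
  Scope,
} from "@sentry/core";
import { dynamicSamplingContextToSentryBaggageHeader } from "@sentry/utils";

// Simplified stand-in for the plugin's `sentryCliOptions` (an assumption, not the full shape).
interface CliOptions {
  url: string;
  authToken: string;
  org?: string;
  project?: string;
  headers?: Record<string, string>;
}

export async function uploadWithTracePropagation(
  sentryScope: Scope,
  cliOptions: CliOptions,
  uploadFolder: string
): Promise<void> {
  await startSpan({ name: "upload", scope: sentryScope }, async (uploadSpan) => {
    const cli = new SentryCli(null, {
      ...cliOptions,
      headers: {
        // Propagate the active span so the CLI's upload requests join this trace.
        "sentry-trace": spanToTraceHeader(uploadSpan),
        baggage:
          dynamicSamplingContextToSentryBaggageHeader(
            getDynamicSamplingContextFromSpan(uploadSpan)
          ) ?? "",
        ...cliOptions.headers,
      },
    });

    // The release name is effectively ignored for debug-ID uploads, as noted in the diff.
    await cli.releases.uploadSourceMaps("placeholder-release", {
      include: [{ paths: [uploadFolder], rewrite: false }],
      useArtifactBundle: true,
    });
  });
}
```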
