From ce73f268550cb0bf121ac3be646560641fb9c33b Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Fri, 13 Sep 2024 16:30:55 +0200 Subject: [PATCH] Prepare support for Dynamic Instrumentation (#4492) Prepare the Node.js tracer to be used with the Dynamic Instrumentation product (DI). Probes are added as breakpoints using the V8 Inspector Protocol. The inspector runs in a worker thread and will pause the main thread temporarily while gathering the required information related to the probe (as of this commit no information is gathered, but this is a required step once we start to gather the local state snapshot). DI features included in this commit: - Support for line-based log probes - Support for loading probes via Remote Configuration - Support for sending probe status to the debugger-backend - Support for sending log data to debugger-backend Please note that with this commit it's still not possible to use DI with the Node.js tracer as support for using the Node.js tracer with DI has not yet been enabled in the Datadog UI. 
--- .github/workflows/debugger.yml | 33 ++ integration-tests/debugger/index.spec.js | 451 ++++++++++++++++++ .../debugger/target-app/index.js | 18 + integration-tests/helpers/fake-agent.js | 40 +- integration-tests/helpers/index.js | 20 + package.json | 3 + packages/dd-trace/src/config.js | 13 +- .../src/debugger/devtools_client/config.js | 24 + .../src/debugger/devtools_client/index.js | 57 +++ .../inspector_promises_polyfill.js | 23 + .../debugger/devtools_client/remote_config.js | 164 +++++++ .../src/debugger/devtools_client/send.js | 28 ++ .../src/debugger/devtools_client/session.js | 7 + .../src/debugger/devtools_client/state.js | 47 ++ .../src/debugger/devtools_client/status.js | 109 +++++ packages/dd-trace/src/debugger/index.js | 92 ++++ packages/dd-trace/src/proxy.js | 6 + packages/dd-trace/test/config.spec.js | 11 + .../debugger/devtools_client/status.spec.js | 102 ++++ 19 files changed, 1236 insertions(+), 12 deletions(-) create mode 100644 .github/workflows/debugger.yml create mode 100644 integration-tests/debugger/index.spec.js create mode 100644 integration-tests/debugger/target-app/index.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/config.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/index.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/remote_config.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/send.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/session.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/state.js create mode 100644 packages/dd-trace/src/debugger/devtools_client/status.js create mode 100644 packages/dd-trace/src/debugger/index.js create mode 100644 packages/dd-trace/test/debugger/devtools_client/status.spec.js diff --git a/.github/workflows/debugger.yml b/.github/workflows/debugger.yml new file mode 100644 index 
00000000000..b9543148382 --- /dev/null +++ b/.github/workflows/debugger.yml @@ -0,0 +1,33 @@ +name: Debugger + +on: + pull_request: + push: + branches: [master] + schedule: + - cron: '0 4 * * *' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + ubuntu: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node/setup + - uses: ./.github/actions/install + - uses: ./.github/actions/node/18 + - run: yarn test:debugger:ci + - run: yarn test:integration:debugger + - uses: ./.github/actions/node/20 + - run: yarn test:debugger:ci + - run: yarn test:integration:debugger + - uses: ./.github/actions/node/latest + - run: yarn test:debugger:ci + - run: yarn test:integration:debugger + - if: always() + uses: ./.github/actions/testagent/logs + - uses: codecov/codecov-action@v3 diff --git a/integration-tests/debugger/index.spec.js b/integration-tests/debugger/index.spec.js new file mode 100644 index 00000000000..db14929a67f --- /dev/null +++ b/integration-tests/debugger/index.spec.js @@ -0,0 +1,451 @@ +'use strict' + +const path = require('path') +const { randomUUID } = require('crypto') +const getPort = require('get-port') +const Axios = require('axios') +const { assert } = require('chai') +const { assertObjectContains, assertUUID, createSandbox, FakeAgent, spawnProc } = require('../helpers') +const { ACKNOWLEDGED, ERROR } = require('../../packages/dd-trace/src/appsec/remote_config/apply_states') +const { version } = require('../../package.json') + +const probeFile = 'debugger/target-app/index.js' +const probeLineNo = 9 +const pollInterval = 1 + +describe('Dynamic Instrumentation', function () { + let axios, sandbox, cwd, appPort, appFile, agent, proc, probeConfig + + before(async function () { + sandbox = await createSandbox(['fastify']) + cwd = sandbox.folder + appFile = path.join(cwd, ...probeFile.split('/')) + }) + + 
after(async function () { + await sandbox.remove() + }) + + beforeEach(async function () { + const probeId = randomUUID() + probeConfig = { + product: 'LIVE_DEBUGGING', + id: `logProbe_${probeId}`, + config: generateProbeConfig({ id: probeId }) + } + appPort = await getPort() + agent = await new FakeAgent().start() + proc = await spawnProc(appFile, { + cwd, + env: { + APP_PORT: appPort, + DD_TRACE_AGENT_PORT: agent.port, + DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS: pollInterval, + DD_DYNAMIC_INSTRUMENTATION_ENABLED: true + } + }) + axios = Axios.create({ + baseURL: `http://localhost:${appPort}` + }) + }) + + afterEach(async function () { + proc.kill() + await agent.stop() + }) + + it('base case: target app should work as expected if no test probe has been added', async function () { + const response = await axios.get('/foo') + assert.strictEqual(response.status, 200) + assert.deepStrictEqual(response.data, { hello: 'foo' }) + }) + + describe('diagnostics messages', function () { + it('should send expected diagnostics messages if probe is received and triggered', function (done) { + let receivedAckUpdate = false + const probeId = probeConfig.config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'INSTALLED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'EMITTING' } } + }] + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, probeConfig.id) + assert.strictEqual(version, 1) + assert.strictEqual(state, ACKNOWLEDGED) + assert.notOk(error) // falsy check since error will be an empty string, but that's an implementation detail + + receivedAckUpdate = true + endIfDone() + }) + + agent.on('debugger-diagnostics', async ({ payload }) => { + try { + const 
expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + assertUUID(payload.debugger.diagnostics.runtimeId) + + if (payload.debugger.diagnostics.status === 'INSTALLED') { + const response = await axios.get('/foo') + assert.strictEqual(response.status, 200) + assert.deepStrictEqual(response.data, { hello: 'foo' }) + } + + endIfDone() + } catch (err) { + // Nessecary hack: Any errors thrown inside of an async function is invisible to Mocha unless the outer `it` + // callback is also `async` (which we can't do in this case since we rely on the `done` callback). + done(err) + } + }) + + agent.addRemoteConfig(probeConfig) + + function endIfDone () { + if (receivedAckUpdate && expectedPayloads.length === 0) done() + } + }) + + it('should send expected diagnostics messages if probe is first received and then updated', function (done) { + let receivedAckUpdates = 0 + const probeId = probeConfig.config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'INSTALLED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 1, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 1, status: 'INSTALLED' } } + }] + const triggers = [ + () => { + probeConfig.config.version++ + agent.updateRemoteConfig(probeConfig.id, probeConfig.config) + }, + () => {} + ] + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, probeConfig.id) + assert.strictEqual(version, ++receivedAckUpdates) + assert.strictEqual(state, ACKNOWLEDGED) + assert.notOk(error) // falsy check since error will be an empty string, but that's an implementation detail + + endIfDone() + }) + + agent.on('debugger-diagnostics', ({ payload 
}) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + assertUUID(payload.debugger.diagnostics.runtimeId) + if (payload.debugger.diagnostics.status === 'INSTALLED') triggers.shift()() + endIfDone() + }) + + agent.addRemoteConfig(probeConfig) + + function endIfDone () { + if (receivedAckUpdates === 2 && expectedPayloads.length === 0) done() + } + }) + + it('should send expected diagnostics messages if probe is first received and then deleted', function (done) { + let receivedAckUpdate = false + let payloadsProcessed = false + const probeId = probeConfig.config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'INSTALLED' } } + }] + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, probeConfig.id) + assert.strictEqual(version, 1) + assert.strictEqual(state, ACKNOWLEDGED) + assert.notOk(error) // falsy check since error will be an empty string, but that's an implementation detail + + receivedAckUpdate = true + endIfDone() + }) + + agent.on('debugger-diagnostics', ({ payload }) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + assertUUID(payload.debugger.diagnostics.runtimeId) + + if (payload.debugger.diagnostics.status === 'INSTALLED') { + agent.removeRemoteConfig(probeConfig.id) + // Wait a little to see if we get any follow-up `debugger-diagnostics` messages + setTimeout(() => { + payloadsProcessed = true + endIfDone() + }, pollInterval * 2 * 1000) // wait twice as long as the RC poll interval + } + }) + + agent.addRemoteConfig(probeConfig) + + function endIfDone () { + if (receivedAckUpdate && payloadsProcessed) done() + } + }) + + const unsupporedOrInvalidProbes = [[ + 'should send expected error diagnostics messages if probe 
doesn\'t conform to expected schema', + 'bad config!!!', + { status: 'ERROR' } + ], [ + 'should send expected error diagnostics messages if probe type isn\'t supported', + generateProbeConfig({ type: 'INVALID_PROBE' }) + ], [ + 'should send expected error diagnostics messages if it isn\'t a line-probe', + generateProbeConfig({ where: { foo: 'bar' } }) // TODO: Use valid schema for method probe instead + ]] + + for (const [title, config, customErrorDiagnosticsObj] of unsupporedOrInvalidProbes) { + it(title, function (done) { + let receivedAckUpdate = false + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, `logProbe_${config.id}`) + assert.strictEqual(version, 1) + assert.strictEqual(state, ERROR) + assert.strictEqual(error.slice(0, 6), 'Error:') + + receivedAckUpdate = true + endIfDone() + }) + + const probeId = config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: customErrorDiagnosticsObj ?? 
{ probeId, version: 0, status: 'ERROR' } } + }] + + agent.on('debugger-diagnostics', ({ payload }) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + const { diagnostics } = payload.debugger + assertUUID(diagnostics.runtimeId) + + if (diagnostics.status === 'ERROR') { + assert.property(diagnostics, 'exception') + assert.hasAllKeys(diagnostics.exception, ['message', 'stacktrace']) + assert.typeOf(diagnostics.exception.message, 'string') + assert.typeOf(diagnostics.exception.stacktrace, 'string') + } + + endIfDone() + }) + + agent.addRemoteConfig({ + product: 'LIVE_DEBUGGING', + id: `logProbe_${config.id}`, + config + }) + + function endIfDone () { + if (receivedAckUpdate && expectedPayloads.length === 0) done() + } + }) + } + }) + + describe('input messages', function () { + it('should capture and send expected snapshot when a log line probe is triggered', function (done) { + agent.on('debugger-diagnostics', ({ payload }) => { + if (payload.debugger.diagnostics.status === 'INSTALLED') { + axios.get('/foo') + } + }) + + agent.on('debugger-input', ({ payload }) => { + const expected = { + ddsource: 'dd_debugger', + service: 'node', + message: 'Hello World!', + logger: { + name: 'debugger/target-app/index.js', + method: 'handler', + version, + thread_name: 'MainThread' + }, + 'debugger.snapshot': { + probe: { + id: probeConfig.config.id, + version: 0, + location: { file: probeFile, lines: [probeLineNo] } + }, + language: 'javascript' + } + } + + assertObjectContains(payload, expected) + assert.match(payload.logger.thread_id, /^pid:\d+$/) + assertUUID(payload['debugger.snapshot'].id) + assert.isNumber(payload['debugger.snapshot'].timestamp) + assert.isTrue(payload['debugger.snapshot'].timestamp > Date.now() - 1000 * 60) + assert.isTrue(payload['debugger.snapshot'].timestamp <= Date.now()) + + done() + }) + + agent.addRemoteConfig(probeConfig) + }) + + it('should respond with updated message if probe message is updated', function 
(done) { + const expectedMessages = ['Hello World!', 'Hello Updated World!'] + const triggers = [ + async () => { + await axios.get('/foo') + probeConfig.config.version++ + probeConfig.config.template = 'Hello Updated World!' + agent.updateRemoteConfig(probeConfig.id, probeConfig.config) + }, + async () => { + await axios.get('/foo') + } + ] + + agent.on('debugger-diagnostics', ({ payload }) => { + if (payload.debugger.diagnostics.status === 'INSTALLED') triggers.shift()().catch(done) + }) + + agent.on('debugger-input', ({ payload }) => { + assert.strictEqual(payload.message, expectedMessages.shift()) + if (expectedMessages.length === 0) done() + }) + + agent.addRemoteConfig(probeConfig) + }) + + it('should not trigger if probe is deleted', function (done) { + agent.on('debugger-diagnostics', async ({ payload }) => { + try { + if (payload.debugger.diagnostics.status === 'INSTALLED') { + agent.once('remote-confg-responded', async () => { + try { + await axios.get('/foo') + // We want to wait enough time to see if the client triggers on the breakpoint so that the test can fail + // if it does, but not so long that the test times out. + // TODO: Is there some signal we can use instead of a timer? + setTimeout(done, pollInterval * 2 * 1000) // wait twice as long as the RC poll interval + } catch (err) { + // Nessecary hack: Any errors thrown inside of an async function is invisible to Mocha unless the outer + // `it` callback is also `async` (which we can't do in this case since we rely on the `done` callback). + done(err) + } + }) + + agent.removeRemoteConfig(probeConfig.id) + } + } catch (err) { + // Nessecary hack: Any errors thrown inside of an async function is invisible to Mocha unless the outer `it` + // callback is also `async` (which we can't do in this case since we rely on the `done` callback). 
+ done(err) + } + }) + + agent.on('debugger-input', () => { + assert.fail('should not capture anything when the probe is deleted') + }) + + agent.addRemoteConfig(probeConfig) + }) + }) + + describe('race conditions', () => { + it('should remove the last breakpoint completely before trying to add a new one', (done) => { + const probeId1 = probeConfig.config.id + const probeId2 = randomUUID() + const probeConfig2 = { + product: 'LIVE_DEBUGGING', + id: `logProbe_${probeId2}`, + config: generateProbeConfig({ id: probeId2 }) + } + + agent.on('debugger-diagnostics', ({ payload: { debugger: { diagnostics: { status, probeId } } } }) => { + if (status !== 'INSTALLED') return + + if (probeId === probeId1) { + // First INSTALLED payload: Try to trigger the race condition. + agent.removeRemoteConfig(probeConfig.id) + agent.addRemoteConfig(probeConfig2) + } else { + // Second INSTALLED payload: Perform an HTTP request to see if we successfully handled the race condition. + let finished = false + + // If the race condition occurred, the debugger will have been detached from the main thread and the new + // probe will never trigger. If that's the case, the following timer will fire: + const timer = setTimeout(() => { + done(new Error('Race condition occurred!')) + }, 1000) + + // If we successfully handled the race condition, the probe will trigger, we'll get a probe result and the + // following event listener will be called: + agent.once('debugger-input', () => { + clearTimeout(timer) + finished = true + done() + }) + + // Perform HTTP request to try and trigger the probe + axios.get('/foo').catch((err) => { + // If the request hasn't fully completed by the time the tests ends and the target app is destroyed, Axios + // will complain with a "socket hang up" error. Hence this sanity check before calling `done(err)`. If we + // later add more tests below this one, this shouuldn't be an issue. 
+ if (!finished) done(err) + }) + } + }) + + agent.addRemoteConfig(probeConfig) + }) + }) +}) + +function generateProbeConfig (overrides) { + return { + id: randomUUID(), + version: 0, + type: 'LOG_PROBE', + language: 'javascript', + where: { sourceFile: probeFile, lines: [String(probeLineNo)] }, + tags: [], + template: 'Hello World!', + segments: [{ str: 'Hello World!' }], + captureSnapshot: false, + capture: { maxReferenceDepth: 3 }, + sampling: { snapshotsPerSecond: 5000 }, + evaluateAt: 'EXIT', + ...overrides + } +} diff --git a/integration-tests/debugger/target-app/index.js b/integration-tests/debugger/target-app/index.js new file mode 100644 index 00000000000..d0e1b7fb6dd --- /dev/null +++ b/integration-tests/debugger/target-app/index.js @@ -0,0 +1,18 @@ +'use strict' + +require('dd-trace/init') +const Fastify = require('fastify') + +const fastify = Fastify() + +fastify.get('/:name', function handler (request) { + return { hello: request.params.name } +}) + +fastify.listen({ port: process.env.APP_PORT }, (err) => { + if (err) { + fastify.log.error(err) + process.exit(1) + } + process.send({ port: process.env.APP_PORT }) +}) diff --git a/integration-tests/helpers/fake-agent.js b/integration-tests/helpers/fake-agent.js index 86c6890bf00..70aff2ecfa8 100644 --- a/integration-tests/helpers/fake-agent.js +++ b/integration-tests/helpers/fake-agent.js @@ -13,8 +13,7 @@ module.exports = class FakeAgent extends EventEmitter { constructor (port = 0) { super() this.port = port - this._rcFiles = {} - this._rcTargetsVersion = 0 + this.resetRemoteConfig() } async start () { @@ -95,11 +94,12 @@ module.exports = class FakeAgent extends EventEmitter { } /** - * Remove any existing config added by calls to FakeAgent#addRemoteConfig. + * Reset any existing Remote Config state. Usefull in `before` and `beforeEach` blocks. 
*/ resetRemoteConfig () { this._rcFiles = {} - this._rcTargetsVersion++ + this._rcTargetsVersion = 0 + this._rcSeenStates = new Set() } // **resolveAtFirstSuccess** - specific use case for Next.js (or any other future libraries) @@ -216,13 +216,23 @@ function buildExpressServer (agent) { console.error(state.error) // eslint-disable-line no-console } - for (const { apply_error: error } of state.config_states) { - if (error) { + for (const cs of state.config_states) { + const uniqueState = `${cs.id}-${cs.version}-${cs.apply_state}` + if (!agent._rcSeenStates.has(uniqueState)) { + agent._rcSeenStates.add(uniqueState) + agent.emit('remote-config-ack-update', cs.id, cs.version, cs.apply_state, cs.apply_error) + } + + if (cs.apply_error) { // Print the error sent by the client in case it's useful in debugging tests - console.error(error) // eslint-disable-line no-console + console.error(cs.apply_error) // eslint-disable-line no-console } } + res.on('close', () => { + agent.emit('remote-confg-responded') + }) + if (agent._rcTargetsVersion === state.targets_version) { // If the state hasn't changed since the last time the client asked, just return an empty result res.json({}) @@ -272,6 +282,22 @@ function buildExpressServer (agent) { }) }) + app.post('/debugger/v1/input', (req, res) => { + res.status(200).send() + agent.emit('debugger-input', { + headers: req.headers, + payload: req.body + }) + }) + + app.post('/debugger/v1/diagnostics', upload.any(), (req, res) => { + res.status(200).send() + agent.emit('debugger-diagnostics', { + headers: req.headers, + payload: JSON.parse(req.files[0].buffer.toString()) + }) + }) + app.post('/profiling/v1/input', upload.any(), (req, res) => { res.status(200).send() agent.emit('message', { diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index 49a04544322..98074ba89b4 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -334,12 +334,32 @@ function useSandbox 
(...args) { return oldSandbox.remove() }) } + function sandboxCwd () { return sandbox.folder } +function assertObjectContains (actual, expected) { + for (const [key, val] of Object.entries(expected)) { + if (val !== null && typeof val === 'object') { + assert.ok(key in actual) + assert.notStrictEqual(actual[key], null) + assert.strictEqual(typeof actual[key], 'object') + assertObjectContains(actual[key], val) + } else { + assert.strictEqual(actual[key], expected[key]) + } + } +} + +function assertUUID (actual, msg = 'not a valid UUID') { + assert.match(actual, /^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$/, msg) +} + module.exports = { FakeAgent, + assertObjectContains, + assertUUID, spawnProc, runAndCheckWithTelemetry, createSandbox, diff --git a/package.json b/package.json index bdca10d0820..aaeaa14473b 100644 --- a/package.json +++ b/package.json @@ -20,6 +20,8 @@ "test:appsec:ci": "nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" -- npm run test:appsec", "test:appsec:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"", "test:appsec:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" -- npm run test:appsec:plugins", + "test:debugger": "tap packages/dd-trace/test/debugger/**/*.spec.js", + "test:debugger:ci": "npm run test:debugger -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/debugger/**/*.js\"", "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"", "test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"", "test:instrumentations": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 
'packages/datadog-instrumentations/test/**/*.spec.js'", @@ -37,6 +39,7 @@ "test:integration:appsec": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/appsec/*.spec.js\"", "test:integration:cucumber": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cucumber/*.spec.js\"", "test:integration:cypress": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cypress/*.spec.js\"", + "test:integration:debugger": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/debugger/*.spec.js\"", "test:integration:jest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/jest/*.spec.js\"", "test:integration:mocha": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/mocha/*.spec.js\"", "test:integration:playwright": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/playwright/*.spec.js\"", diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index db473b42525..0060062b4eb 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -3,7 +3,7 @@ const fs = require('fs') const os = require('os') const uuid = require('crypto-randomuuid') // we need to keep the old uuid dep because of cypress -const URL = require('url').URL +const { URL } = require('url') const log = require('./log') const pkg = require('./pkg') const coalesce = require('koalas') @@ -468,6 +468,7 @@ class Config { this._setValue(defaults, 'dogstatsd.hostname', '127.0.0.1') this._setValue(defaults, 'dogstatsd.port', '8125') this._setValue(defaults, 'dsmEnabled', false) + this._setValue(defaults, 'dynamicInstrumentationEnabled', false) this._setValue(defaults, 'env', undefined) this._setValue(defaults, 'experimental.enableGetRumData', false) this._setValue(defaults, 'experimental.exporter', undefined) @@ -574,6 +575,7 @@ class 
Config { DD_DBM_PROPAGATION_MODE, DD_DOGSTATSD_HOSTNAME, DD_DOGSTATSD_PORT, + DD_DYNAMIC_INSTRUMENTATION_ENABLED, DD_ENV, DD_EXPERIMENTAL_API_SECURITY_ENABLED, DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED, @@ -703,6 +705,7 @@ class Config { this._setString(env, 'dogstatsd.hostname', DD_DOGSTATSD_HOSTNAME) this._setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT) this._setBoolean(env, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) + this._setBoolean(env, 'dynamicInstrumentationEnabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) this._setString(env, 'env', DD_ENV || tags.env) this._setBoolean(env, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED) this._setString(env, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER) @@ -870,11 +873,11 @@ class Config { this._setString(opts, 'dogstatsd.port', options.dogstatsd.port) } this._setBoolean(opts, 'dsmEnabled', options.dsmEnabled) + this._setBoolean(opts, 'dynamicInstrumentationEnabled', options.experimental?.dynamicInstrumentationEnabled) this._setString(opts, 'env', options.env || tags.env) - this._setBoolean(opts, 'experimental.enableGetRumData', - options.experimental && options.experimental.enableGetRumData) - this._setString(opts, 'experimental.exporter', options.experimental && options.experimental.exporter) - this._setBoolean(opts, 'experimental.runtimeId', options.experimental && options.experimental.runtimeId) + this._setBoolean(opts, 'experimental.enableGetRumData', options.experimental?.enableGetRumData) + this._setString(opts, 'experimental.exporter', options.experimental?.exporter) + this._setBoolean(opts, 'experimental.runtimeId', options.experimental?.runtimeId) this._setValue(opts, 'flushInterval', maybeInt(options.flushInterval)) this._optsUnprocessed.flushInterval = options.flushInterval this._setValue(opts, 'flushMinSpans', maybeInt(options.flushMinSpans)) diff --git a/packages/dd-trace/src/debugger/devtools_client/config.js b/packages/dd-trace/src/debugger/devtools_client/config.js new 
file mode 100644 index 00000000000..3e7c19715e1 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/config.js @@ -0,0 +1,24 @@ +'use strict' + +const { workerData: { config: parentConfig, parentThreadId, configPort } } = require('node:worker_threads') +const { format } = require('node:url') +const log = require('../../log') + +const config = module.exports = { + runtimeId: parentConfig.tags['runtime-id'], + service: parentConfig.service, + parentThreadId +} + +updateUrl(parentConfig) + +configPort.on('message', updateUrl) +configPort.on('messageerror', (err) => log.error(err)) + +function updateUrl (updates) { + config.url = updates.url || format({ + protocol: 'http:', + hostname: updates.hostname || 'localhost', + port: updates.port + }) +} diff --git a/packages/dd-trace/src/debugger/devtools_client/index.js b/packages/dd-trace/src/debugger/devtools_client/index.js new file mode 100644 index 00000000000..f4789ea65a8 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/index.js @@ -0,0 +1,57 @@ +'use strict' + +const { randomUUID } = require('crypto') +const { breakpoints } = require('./state') +const session = require('./session') +const send = require('./send') +const { ackEmitting } = require('./status') +const { parentThreadId } = require('./config') +const log = require('../../log') +const { version } = require('../../../../../package.json') + +require('./remote_config') + +// There doesn't seem to be an official standard for the content of these fields, so we're just populating them with +// something that should be useful to a Node.js developer. +const threadId = parentThreadId === 0 ? `pid:${process.pid}` : `pid:${process.pid};tid:${parentThreadId}` +const threadName = parentThreadId === 0 ? 
'MainThread' : `WorkerThread:${parentThreadId}` + +session.on('Debugger.paused', async ({ params }) => { + const start = process.hrtime.bigint() + const timestamp = Date.now() + const probes = params.hitBreakpoints.map((id) => breakpoints.get(id)) + await session.post('Debugger.resume') + const diff = process.hrtime.bigint() - start // TODO: Should this be recored as telemetry? + + log.debug(`Finished processing breakpoints - main thread paused for: ${Number(diff) / 1000000} ms`) + + const logger = { + // We can safely use `location.file` from the first probe in the array, since all probes hit by `hitBreakpoints` + // must exist in the same file since the debugger can only pause the main thread in one location. + name: probes[0].location.file, // name of the class/type/file emitting the snapshot + method: params.callFrames[0].functionName, // name of the method/function emitting the snapshot + version, + thread_id: threadId, + thread_name: threadName + } + + // TODO: Send multiple probes in one HTTP request as an array + for (const probe of probes) { + const snapshot = { + id: randomUUID(), + timestamp, + probe: { + id: probe.id, + version: probe.version, + location: probe.location + }, + language: 'javascript' + } + + // TODO: Process template + send(probe.template, logger, snapshot, (err) => { + if (err) log.error(err) + else ackEmitting(probe) + }) + } +}) diff --git a/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js new file mode 100644 index 00000000000..bb4b0340be6 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js @@ -0,0 +1,23 @@ +'use strict' + +const { builtinModules } = require('node:module') + +if (builtinModules.includes('inspector/promises')) { + module.exports = require('node:inspector/promises') +} else { + const inspector = require('node:inspector') + const { promisify } = require('node:util') + 
+ // The rest of the code in this file is lifted from: + // https://github.com/nodejs/node/blob/1d4d76ff3fb08f9a0c55a1d5530b46c4d5d550c7/lib/inspector/promises.js + class Session extends inspector.Session { + constructor () { super() } // eslint-disable-line no-useless-constructor + } + + Session.prototype.post = promisify(inspector.Session.prototype.post) + + module.exports = { + ...inspector, + Session + } +} diff --git a/packages/dd-trace/src/debugger/devtools_client/remote_config.js b/packages/dd-trace/src/debugger/devtools_client/remote_config.js new file mode 100644 index 00000000000..25ac070cc9f --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/remote_config.js @@ -0,0 +1,164 @@ +'use strict' + +const { workerData: { rcPort } } = require('node:worker_threads') +const { getScript, probes, breakpoints } = require('./state') +const session = require('./session') +const { ackReceived, ackInstalled, ackError } = require('./status') +const log = require('../../log') + +let sessionStarted = false + +// Example log line probe (simplified): +// { +// id: '100c9a5c-45ad-49dc-818b-c570d31e11d1', +// version: 0, +// type: 'LOG_PROBE', +// where: { sourceFile: 'index.js', lines: ['25'] }, // only use first array element +// template: 'Hello World 2', +// segments: [...], +// captureSnapshot: true, +// capture: { maxReferenceDepth: 1 }, +// sampling: { snapshotsPerSecond: 1 }, +// evaluateAt: 'EXIT' // only used for method probes +// } +// +// Example log method probe (simplified): +// { +// id: 'd692ee6d-5734-4df7-9d86-e3bc6449cc8c', +// version: 0, +// type: 'LOG_PROBE', +// where: { typeName: 'index.js', methodName: 'handlerA' }, +// template: 'Executed index.js.handlerA, it took {@duration}ms', +// segments: [...], +// captureSnapshot: false, +// capture: { maxReferenceDepth: 3 }, +// sampling: { snapshotsPerSecond: 5000 }, +// evaluateAt: 'EXIT' // only used for method probes +// } +rcPort.on('message', async ({ action, conf: probe, ackId }) => { + 
try { + await processMsg(action, probe) + rcPort.postMessage({ ackId }) + } catch (err) { + rcPort.postMessage({ ackId, error: err }) + ackError(err, probe) + } +}) +rcPort.on('messageerror', (err) => log.error(err)) + +async function start () { + sessionStarted = true + return session.post('Debugger.enable') // return instead of await to reduce number of promises created +} + +async function stop () { + sessionStarted = false + return session.post('Debugger.disable') // return instead of await to reduce number of promises created +} + +async function processMsg (action, probe) { + log.debug(`Received request to ${action} ${probe.type} probe (id: ${probe.id}, version: ${probe.version})`) + + if (action !== 'unapply') ackReceived(probe) + + if (probe.type !== 'LOG_PROBE') { + throw new Error(`Unsupported probe type: ${probe.type} (id: ${probe.id}, version: ${probe.version})`) + } + if (!probe.where.sourceFile && !probe.where.lines) { + throw new Error( + // eslint-disable-next-line max-len + `Unsupported probe insertion point! Only line-based probes are supported (id: ${probe.id}, version: ${probe.version})` + ) + } + + // This lock is to ensure that we don't get the following race condition: + // + // When a breakpoint is being removed and there are no other breakpoints, we disable the debugger by calling + // `Debugger.disable` to free resources. However, if a new breakpoint is being added around the same time, we might + // have a race condition where the new breakpoint thinks that the debugger is already enabled because the removal of + // the other breakpoint hasn't had a chance to call `Debugger.disable` yet. Then once the code that's adding the new + // breakpoints tries to call `Debugger.setBreakpoint` it fails because in the meantime `Debugger.disable` was called. + // + // If the code is ever refactored to not tear down the debugger if there's no active breakpoints, we can safely remove + // this lock. 
+ const release = await lock() + + try { + switch (action) { + case 'unapply': + await removeBreakpoint(probe) + break + case 'apply': + await addBreakpoint(probe) + break + case 'modify': + // TODO: Can we modify in place? + await removeBreakpoint(probe) + await addBreakpoint(probe) + break + default: + throw new Error( + // eslint-disable-next-line max-len + `Cannot process probe ${probe.id} (version: ${probe.version}) - unknown remote configuration action: ${action}` + ) + } + } finally { + release() + } +} + +async function addBreakpoint (probe) { + if (!sessionStarted) await start() + + const file = probe.where.sourceFile + const line = Number(probe.where.lines[0]) // Tracer doesn't support multiple-line breakpoints + + // Optimize for sending data to /debugger/v1/input endpoint + probe.location = { file, lines: [line] } + delete probe.where + + // TODO: In between `await session.post('Debugger.enable')` and here, the scripts are parsed and cached. + // Maybe there's a race condition here or maybe we're guaranteed that `await session.post('Debugger.enable')` will + // not continue until all scripts have been parsed? + const script = getScript(file) + if (!script) throw new Error(`No loaded script found for ${file} (probe: ${probe.id}, version: ${probe.version})`) + const [path, scriptId] = script + + log.debug(`Adding breakpoint at ${path}:${line} (probe: ${probe.id}, version: ${probe.version})`) + + const { breakpointId } = await session.post('Debugger.setBreakpoint', { + location: { + scriptId, + lineNumber: line - 1 // Beware! 
lineNumber is zero-indexed + } + }) + + probes.set(probe.id, breakpointId) + breakpoints.set(breakpointId, probe) + + ackInstalled(probe) +} + +async function removeBreakpoint ({ id }) { + if (!sessionStarted) { + // We should not get in this state, but abort if we do, so the code doesn't fail unexpectedly + throw Error(`Cannot remove probe ${id}: Debugger not started`) + } + if (!probes.has(id)) { + throw Error(`Unknown probe id: ${id}`) + } + + const breakpointId = probes.get(id) + await session.post('Debugger.removeBreakpoint', { breakpointId }) + probes.delete(id) + breakpoints.delete(breakpointId) + + if (breakpoints.size === 0) await stop() +} + +async function lock () { + if (lock.p) await lock.p + let resolve + lock.p = new Promise((_resolve) => { resolve = _resolve }).then(() => { lock.p = null }) + return resolve +} diff --git a/packages/dd-trace/src/debugger/devtools_client/send.js b/packages/dd-trace/src/debugger/devtools_client/send.js new file mode 100644 index 00000000000..709e14d52b7 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/send.js @@ -0,0 +1,28 @@ +'use strict' + +const config = require('./config') +const request = require('../../exporters/common/request') + +module.exports = send + +const ddsource = 'dd_debugger' +const service = config.service + +function send (message, logger, snapshot, cb) { + const opts = { + method: 'POST', + url: config.url, + path: '/debugger/v1/input', + headers: { 'Content-Type': 'application/json; charset=utf-8' } + } + + const payload = { + ddsource, + service, + message, + logger, + 'debugger.snapshot': snapshot + } + + request(JSON.stringify(payload), opts, cb) +} diff --git a/packages/dd-trace/src/debugger/devtools_client/session.js b/packages/dd-trace/src/debugger/devtools_client/session.js new file mode 100644 index 00000000000..3cda2322b36 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/session.js @@ -0,0 +1,7 @@ +'use strict' + +const inspector = 
require('./inspector_promises_polyfill') + +const session = module.exports = new inspector.Session() + +session.connectToMainThread() diff --git a/packages/dd-trace/src/debugger/devtools_client/state.js b/packages/dd-trace/src/debugger/devtools_client/state.js new file mode 100644 index 00000000000..316841667fb --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/state.js @@ -0,0 +1,47 @@ +'use strict' + +const session = require('./session') + +const scripts = [] + +module.exports = { + probes: new Map(), + breakpoints: new Map(), + + /** + * Find the matching script that can be inspected based on a partial path. + * + * Algorithm: Find the shortest url that ends in the requested path. + * + * Will identify the correct script as long as Node.js doesn't load a module from a `node_modules` folder outside the + * project root. If so, there's a risk that this path is shorter than the expected path inside the project root. + * Example of mismatch where path = `index.js`: + * + * Expected match: /www/code/my-projects/demo-project1/index.js + * Actual shorter match: /www/node_modules/dd-trace/index.js + * + * To fix this, specify a more unique file path, e.g `demo-project1/index.js` instead of `index.js` + * + * @param {string} path + * @returns {[string, string] | undefined} + */ + getScript (path) { + return scripts + .filter(([url]) => url.endsWith(path)) + .sort(([a], [b]) => a.length - b.length)[0] + } +} + +// Known params.url protocols: +// - `node:` - Ignored, as we don't want to instrument Node.js internals +// - `wasm:` - Ignored, as we don't support instrumenting WebAssembly +// - `file:` - Regular on-disk file +// Unknown params.url values: +// - `structured-stack` - Not sure what this is, but should just be ignored +// - `` - Not sure what this is, but should just be ignored +// TODO: Event fired for all files, every time debugger is enabled. 
So when we disable it, we need to reset the state +session.on('Debugger.scriptParsed', ({ params }) => { + if (params.url.startsWith('file:')) { + scripts.push([params.url, params.scriptId]) + } +}) diff --git a/packages/dd-trace/src/debugger/devtools_client/status.js b/packages/dd-trace/src/debugger/devtools_client/status.js new file mode 100644 index 00000000000..e4ba10d8c55 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/status.js @@ -0,0 +1,109 @@ +'use strict' + +const LRUCache = require('lru-cache') +const config = require('./config') +const request = require('../../exporters/common/request') +const FormData = require('../../exporters/common/form-data') +const log = require('../../log') + +module.exports = { + ackReceived, + ackInstalled, + ackEmitting, + ackError +} + +const ddsource = 'dd_debugger' +const service = config.service +const runtimeId = config.runtimeId + +const cache = new LRUCache({ + ttl: 1000 * 60 * 60, // 1 hour + // Unfortunate requirement when using LRUCache: + // It will emit a warning unless `ttlAutopurge`, `max`, or `maxSize` is set when using `ttl`. 
+ // TODO: Consider alternative as this is NOT performant :( + ttlAutopurge: true +}) + +const STATUSES = { + RECEIVED: 'RECEIVED', + INSTALLED: 'INSTALLED', + EMITTING: 'EMITTING', + ERROR: 'ERROR', + BLOCKED: 'BLOCKED' // TODO: Implement once support for allow list, deny list or max probe limit has been added +} + +function ackReceived ({ id: probeId, version }) { + onlyUniqueUpdates( + STATUSES.RECEIVED, probeId, version, + () => send(statusPayload(probeId, version, STATUSES.RECEIVED)) + ) +} + +function ackInstalled ({ id: probeId, version }) { + onlyUniqueUpdates( + STATUSES.INSTALLED, probeId, version, + () => send(statusPayload(probeId, version, STATUSES.INSTALLED)) + ) +} + +function ackEmitting ({ id: probeId, version }) { + onlyUniqueUpdates( + STATUSES.EMITTING, probeId, version, + () => send(statusPayload(probeId, version, STATUSES.EMITTING)) + ) +} + +function ackError (err, { id: probeId, version }) { + log.error(err) + + onlyUniqueUpdates(STATUSES.ERROR, probeId, version, () => { + const payload = statusPayload(probeId, version, STATUSES.ERROR) + + payload.debugger.diagnostics.exception = { + type: err.code, + message: err.message, + stacktrace: err.stack + } + + send(payload) + }) +} + +function send (payload) { + const form = new FormData() + + form.append( + 'event', + JSON.stringify(payload), + { filename: 'event.json', contentType: 'application/json; charset=utf-8' } + ) + + const options = { + method: 'POST', + url: config.url, + path: '/debugger/v1/diagnostics', + headers: form.getHeaders() + } + + request(form, options, (err) => { + if (err) log.error(err) + }) +} + +function statusPayload (probeId, version, status) { + return { + ddsource, + service, + debugger: { + diagnostics: { probeId, runtimeId, version, status } + } + } +} + +function onlyUniqueUpdates (type, id, version, fn) { + const key = `${type}-${id}-${version}` + if (cache.has(key)) return + fn() + cache.set(key) +} diff --git a/packages/dd-trace/src/debugger/index.js 
b/packages/dd-trace/src/debugger/index.js new file mode 100644 index 00000000000..5db1a440cf2 --- /dev/null +++ b/packages/dd-trace/src/debugger/index.js @@ -0,0 +1,92 @@ +'use strict' + +const { join } = require('path') +const { Worker, MessageChannel, threadId: parentThreadId } = require('worker_threads') +const log = require('../log') + +let worker = null +let configChannel = null + +const { NODE_OPTIONS, ...env } = process.env + +module.exports = { + start, + configure +} + +function start (config, rc) { + if (worker !== null) return + + log.debug('Starting Dynamic Instrumentation client...') + + const rcAckCallbacks = new Map() + const rcChannel = new MessageChannel() + configChannel = new MessageChannel() + + rc.setProductHandler('LIVE_DEBUGGING', (action, conf, id, ack) => { + const ackId = `${id}-${conf.version}` + rcAckCallbacks.set(ackId, ack) + rcChannel.port2.postMessage({ action, conf, ackId }) + }) + + rcChannel.port2.on('message', ({ ackId, error }) => { + rcAckCallbacks.get(ackId)(error) + rcAckCallbacks.delete(ackId) + }) + rcChannel.port2.on('messageerror', (err) => log.error(err)) + + worker = new Worker( + join(__dirname, 'devtools_client', 'index.js'), + { + execArgv: [], // Avoid worker thread inheriting the `-r` command line argument + env, // Avoid worker thread inheriting the `NODE_OPTIONS` environment variable (in case it contains `-r`) + workerData: { + config: serializableConfig(config), + parentThreadId, + rcPort: rcChannel.port1, + configPort: configChannel.port1 + }, + transferList: [rcChannel.port1, configChannel.port1] + } + ) + + worker.unref() + + worker.on('online', () => { + log.debug(`Dynamic Instrumentation worker thread started successfully (thread id: ${worker.threadId})`) + }) + + worker.on('error', (err) => log.error(err)) + worker.on('messageerror', (err) => log.error(err)) + + worker.on('exit', (code) => { + const error = new Error(`Dynamic Instrumentation worker thread exited unexpectedly with code ${code}`) + + 
log.error(error) + + // Be nice, clean up now that the worker thread encountered an issue and we can't continue + rc.removeProductHandler('LIVE_DEBUGGING') + worker.removeAllListeners() + configChannel = null + for (const ackId of rcAckCallbacks.keys()) { + rcAckCallbacks.get(ackId)(error) + rcAckCallbacks.delete(ackId) + } + }) +} + +function configure (config) { + if (configChannel === null) return + configChannel.port2.postMessage(serializableConfig(config)) +} + +// TODO: Refactor the Config class so it never produces any config objects that are incompatible with MessageChannel +function serializableConfig (config) { + // URL objects cannot be serialized over the MessageChannel, so we need to convert them to strings first + if (config.url instanceof URL) { + config = { ...config } + config.url = config.url.toString() + } + + return config +} diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index cfbd92bd5c8..d7ce0538f39 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -5,6 +5,7 @@ const Config = require('./config') const runtimeMetrics = require('./runtime_metrics') const log = require('./log') const { setStartupLogPluginManager } = require('./startup-log') +const DynamicInstrumentation = require('./debugger') const telemetry = require('./telemetry') const nomenclature = require('./service-naming') const PluginManager = require('./plugin_manager') @@ -110,6 +111,10 @@ class Tracer extends NoopProxy { this._flare.enable(config) this._flare.module.send(conf.args) }) + + if (config.dynamicInstrumentationEnabled) { + DynamicInstrumentation.start(config, rc) + } } if (config.isGCPFunction || config.isAzureFunction) { @@ -196,6 +201,7 @@ class Tracer extends NoopProxy { if (this._tracingInitialized) { this._tracer.configure(config) this._pluginManager.configure(config) + DynamicInstrumentation.configure(config) setStartupLogPluginManager(this._pluginManager) } } diff --git 
a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 0c18150850e..e047ab74e9d 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -219,6 +219,7 @@ describe('Config', () => { expect(config).to.have.property('reportHostname', false) expect(config).to.have.property('scope', undefined) expect(config).to.have.property('logLevel', 'debug') + expect(config).to.have.property('dynamicInstrumentationEnabled', false) expect(config).to.have.property('traceId128BitGenerationEnabled', true) expect(config).to.have.property('traceId128BitLoggingEnabled', false) expect(config).to.have.property('spanAttributeSchema', 'v0') @@ -294,6 +295,7 @@ describe('Config', () => { { name: 'dogstatsd.hostname', value: '127.0.0.1', origin: 'calculated' }, { name: 'dogstatsd.port', value: '8125', origin: 'default' }, { name: 'dsmEnabled', value: false, origin: 'default' }, + { name: 'dynamicInstrumentationEnabled', value: false, origin: 'default' }, { name: 'env', value: undefined, origin: 'default' }, { name: 'experimental.enableGetRumData', value: false, origin: 'default' }, { name: 'experimental.exporter', value: undefined, origin: 'default' }, @@ -425,6 +427,7 @@ describe('Config', () => { process.env.DD_RUNTIME_METRICS_ENABLED = 'true' process.env.DD_TRACE_REPORT_HOSTNAME = 'true' process.env.DD_ENV = 'test' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' process.env.DD_TRACE_GLOBAL_TAGS = 'foo:bar,baz:qux' process.env.DD_TRACE_SAMPLE_RATE = '0.5' process.env.DD_TRACE_RATE_LIMIT = '-1' @@ -506,6 +509,7 @@ describe('Config', () => { expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') expect(config).to.have.property('runtimeMetrics', true) expect(config).to.have.property('reportHostname', true) + expect(config).to.have.property('dynamicInstrumentationEnabled', true) expect(config).to.have.property('env', 'test') expect(config).to.have.property('sampleRate', 0.5) 
expect(config).to.have.property('traceId128BitGenerationEnabled', true) @@ -602,6 +606,7 @@ describe('Config', () => { { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'env_var' }, { name: 'dogstatsd.hostname', value: 'dsd-agent', origin: 'env_var' }, { name: 'dogstatsd.port', value: '5218', origin: 'env_var' }, + { name: 'dynamicInstrumentationEnabled', value: true, origin: 'env_var' }, { name: 'env', value: 'test', origin: 'env_var' }, { name: 'experimental.enableGetRumData', value: true, origin: 'env_var' }, { name: 'experimental.exporter', value: 'log', origin: 'env_var' }, @@ -761,6 +766,7 @@ describe('Config', () => { }, experimental: { b3: true, + dynamicInstrumentationEnabled: true, traceparent: true, runtimeId: true, exporter: 'log', @@ -798,6 +804,7 @@ describe('Config', () => { expect(config).to.have.nested.property('dogstatsd.port', '5218') expect(config).to.have.property('service', 'service') expect(config).to.have.property('version', '0.1.0') + expect(config).to.have.property('dynamicInstrumentationEnabled', true) expect(config).to.have.property('env', 'test') expect(config).to.have.property('sampleRate', 0.5) expect(config).to.have.property('logger', logger) @@ -869,6 +876,7 @@ describe('Config', () => { { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'code' }, { name: 'dogstatsd.hostname', value: 'agent-dsd', origin: 'code' }, { name: 'dogstatsd.port', value: '5218', origin: 'code' }, + { name: 'dynamicInstrumentationEnabled', value: true, origin: 'code' }, { name: 'env', value: 'test', origin: 'code' }, { name: 'experimental.enableGetRumData', value: true, origin: 'code' }, { name: 'experimental.exporter', value: 'log', origin: 'code' }, @@ -1039,6 +1047,7 @@ describe('Config', () => { process.env.DD_RUNTIME_METRICS_ENABLED = 'true' process.env.DD_TRACE_REPORT_HOSTNAME = 'true' process.env.DD_ENV = 'test' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' process.env.DD_API_KEY = '123' 
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = 'v0' process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'false' @@ -1112,6 +1121,7 @@ describe('Config', () => { }, experimental: { b3: false, + dynamicInstrumentationEnabled: false, traceparent: false, runtimeId: false, exporter: 'agent', @@ -1167,6 +1177,7 @@ describe('Config', () => { expect(config).to.have.property('flushMinSpans', 500) expect(config).to.have.property('service', 'test') expect(config).to.have.property('version', '1.0.0') + expect(config).to.have.property('dynamicInstrumentationEnabled', false) expect(config).to.have.property('env', 'development') expect(config).to.have.property('clientIpEnabled', true) expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') diff --git a/packages/dd-trace/test/debugger/devtools_client/status.spec.js b/packages/dd-trace/test/debugger/devtools_client/status.spec.js new file mode 100644 index 00000000000..728279c7eca --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/status.spec.js @@ -0,0 +1,102 @@ +'use strict' + +require('../../setup/tap') + +const ddsource = 'dd_debugger' +const service = 'my-service' +const runtimeId = 'my-runtime-id' + +describe('diagnostic message http request caching', () => { + let statusproxy, request + + const acks = [ + ['ackReceived', 'RECEIVED'], + ['ackInstalled', 'INSTALLED'], + ['ackEmitting', 'EMITTING'], + ['ackError', 'ERROR', new Error('boom')] + ] + + beforeEach(() => { + request = sinon.spy() + request['@noCallThru'] = true + + statusproxy = proxyquire('../src/debugger/devtools_client/status', { + './config': { service, runtimeId, '@noCallThru': true }, + '../../exporters/common/request': request + }) + }) + + for (const [ackFnName, status, err] of acks) { + describe(ackFnName, () => { + let ackFn, exception + + beforeEach(() => { + if (err) { + ackFn = statusproxy[ackFnName].bind(null, err) + // Use `JSON.stringify` to remove any fields that are `undefined` + exception = JSON.parse(JSON.stringify({ 
+ type: err.code, + message: err.message, + stacktrace: err.stack + })) + } else { + ackFn = statusproxy[ackFnName] + exception = undefined + } + }) + + it('should only call once if no change', () => { + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) + + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + }) + + it('should call again if version changes', () => { + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) + + ackFn({ id: 'foo', version: 1 }) + expect(request).to.have.been.calledTwice + assertRequestData(request, { probeId: 'foo', version: 1, status, exception }) + }) + + it('should call again if probeId changes', () => { + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) + + ackFn({ id: 'bar', version: 0 }) + expect(request).to.have.been.calledTwice + assertRequestData(request, { probeId: 'bar', version: 0, status, exception }) + }) + }) + } +}) + +function assertRequestData (request, { probeId, version, status, exception }) { + const payload = getFormPayload(request) + const diagnostics = { probeId, runtimeId, version, status } + + // Error requests will also contain an `exception` property + if (exception) diagnostics.exception = exception + + expect(payload).to.deep.equal({ ddsource, service, debugger: { diagnostics } }) + + const opts = getRequestOptions(request) + expect(opts).to.have.property('method', 'POST') + expect(opts).to.have.property('path', '/debugger/v1/diagnostics') +} + +function getRequestOptions (request) { + return request.lastCall.args[1] +} + +function getFormPayload (request) { + const form = request.lastCall.args[0] + const payload = form._data[form._data.length - 2] // the last element is an 
empty line + return JSON.parse(payload) +}