dd-trace 5.90.0 → 5.91.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/index.d.ts +7 -0
  2. package/package.json +7 -6
  3. package/packages/datadog-instrumentations/src/cucumber.js +18 -3
  4. package/packages/datadog-instrumentations/src/helpers/hooks.js +1 -0
  5. package/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/index.js +1 -0
  6. package/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/langgraph.js +30 -0
  7. package/packages/datadog-instrumentations/src/jest.js +9 -2
  8. package/packages/datadog-instrumentations/src/langgraph.js +7 -0
  9. package/packages/datadog-instrumentations/src/mocha/main.js +32 -9
  10. package/packages/datadog-instrumentations/src/mocha/utils.js +0 -1
  11. package/packages/datadog-instrumentations/src/mocha/worker.js +2 -2
  12. package/packages/datadog-instrumentations/src/vitest.js +53 -24
  13. package/packages/datadog-plugin-cypress/src/cypress-plugin.js +1 -1
  14. package/packages/datadog-plugin-cypress/src/support.js +5 -7
  15. package/packages/datadog-plugin-langgraph/src/index.js +24 -0
  16. package/packages/datadog-plugin-langgraph/src/stream.js +41 -0
  17. package/packages/dd-trace/src/config/defaults.js +1 -0
  18. package/packages/dd-trace/src/config/index.js +8 -1
  19. package/packages/dd-trace/src/config/supported-configurations.json +7 -0
  20. package/packages/dd-trace/src/constants.js +1 -0
  21. package/packages/dd-trace/src/crashtracking/crashtracker.js +1 -1
  22. package/packages/dd-trace/src/debugger/devtools_client/config.js +3 -0
  23. package/packages/dd-trace/src/dogstatsd.js +1 -0
  24. package/packages/dd-trace/src/llmobs/plugins/langchain/handlers/chain.js +11 -0
  25. package/packages/dd-trace/src/llmobs/plugins/langchain/index.js +2 -0
  26. package/packages/dd-trace/src/llmobs/plugins/langgraph/index.js +114 -0
  27. package/packages/dd-trace/src/plugins/ci_plugin.js +2 -2
  28. package/packages/dd-trace/src/plugins/index.js +1 -0
  29. package/packages/dd-trace/src/plugins/util/test.js +7 -10
  30. package/packages/dd-trace/src/priority_sampler.js +20 -2
  31. package/packages/dd-trace/src/process-tags/index.js +41 -34
  32. package/packages/dd-trace/src/profiling/profilers/wall.js +9 -1
  33. package/packages/dd-trace/src/proxy.js +4 -0
package/index.d.ts CHANGED
@@ -262,6 +262,7 @@ interface Plugins {
262
262
  "knex": tracer.plugins.knex;
263
263
  "koa": tracer.plugins.koa;
264
264
  "langchain": tracer.plugins.langchain;
265
+ "langgraph": tracer.plugins.langgraph;
265
266
  "mariadb": tracer.plugins.mariadb;
266
267
  "memcached": tracer.plugins.memcached;
267
268
  "microgateway-core": tracer.plugins.microgateway_core;
@@ -2579,6 +2580,12 @@ declare namespace tracer {
2579
2580
 
2580
2581
  /**
2581
2582
  * This plugin automatically instruments the
2583
+ * [langgraph](https://github.com/npmjs/package/langgraph) library.
2584
+ */
2585
+ interface langgraph extends Instrumentation {}
2586
+
2587
+ /**
2588
+ * This plugin automatically instruments the
2582
2589
  * [ldapjs](https://github.com/ldapjs/node-ldapjs/) module.
2583
2590
  */
2584
2591
  interface ldapjs extends Instrumentation {}
package/package.json CHANGED
@@ -1,11 +1,12 @@
1
1
  {
2
2
  "name": "dd-trace",
3
- "version": "5.90.0",
3
+ "version": "5.91.0",
4
4
  "description": "Datadog APM tracing client for JavaScript",
5
5
  "main": "index.js",
6
6
  "typings": "index.d.ts",
7
7
  "scripts": {
8
8
  "env": "bash ./plugin-env",
9
+ "prepare": "cd vendor && npm ci --include=dev",
9
10
  "preinstall": "node scripts/preinstall.js",
10
11
  "bench": "node benchmark/index.js",
11
12
  "bench:e2e:test-optimization": "node benchmark/e2e-test-optimization/benchmark-run.js",
@@ -146,13 +147,13 @@
146
147
  "@datadog/wasm-js-rewriter": "5.0.1",
147
148
  "@opentelemetry/api": ">=1.0.0 <1.10.0",
148
149
  "@opentelemetry/api-logs": "<1.0.0",
149
- "oxc-parser": "^0.116.0"
150
+ "oxc-parser": "^0.118.0"
150
151
  },
151
152
  "devDependencies": {
152
153
  "@actions/core": "^3.0.0",
153
154
  "@actions/github": "^9.0.0",
154
155
  "@babel/helpers": "^7.28.6",
155
- "@eslint/eslintrc": "^3.3.1",
156
+ "@eslint/eslintrc": "^3.3.5",
156
157
  "@eslint/js": "^9.39.2",
157
158
  "@msgpack/msgpack": "^3.1.3",
158
159
  "@openfeature/core": "^1.8.1",
@@ -165,11 +166,11 @@
165
166
  "benchmark": "^2.1.4",
166
167
  "body-parser": "^2.2.2",
167
168
  "bun": "1.3.10",
168
- "codeowners-audit": "^2.7.1",
169
+ "codeowners-audit": "^2.9.0",
169
170
  "eslint": "^9.39.2",
170
- "eslint-plugin-cypress": "^6.1.0",
171
+ "eslint-plugin-cypress": "^6.2.0",
171
172
  "eslint-plugin-import": "^2.32.0",
172
- "eslint-plugin-jsdoc": "^62.5.0",
173
+ "eslint-plugin-jsdoc": "^62.8.0",
173
174
  "eslint-plugin-mocha": "^11.2.0",
174
175
  "eslint-plugin-n": "^17.23.2",
175
176
  "eslint-plugin-promise": "^7.2.1",
@@ -166,9 +166,9 @@ function getErrorFromCucumberResult (cucumberResult) {
166
166
  return error
167
167
  }
168
168
 
169
- function getChannelPromise (channelToPublishTo, isParallel = false, frameworkVersion = null) {
169
+ function getChannelPromise (channelToPublishTo, frameworkVersion = null) {
170
170
  return new Promise(resolve => {
171
- channelToPublishTo.publish({ onDone: resolve, isParallel, frameworkVersion })
171
+ channelToPublishTo.publish({ onDone: resolve, frameworkVersion })
172
172
  })
173
173
  }
174
174
 
@@ -505,7 +505,7 @@ function getWrappedStart (start, frameworkVersion, isParallel = false, isCoordin
505
505
  }
506
506
  let errorSkippableRequest
507
507
 
508
- const configurationResponse = await getChannelPromise(libraryConfigurationCh, isParallel, frameworkVersion)
508
+ const configurationResponse = await getChannelPromise(libraryConfigurationCh, frameworkVersion)
509
509
 
510
510
  isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled
511
511
  earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries
@@ -681,6 +681,7 @@ function getWrappedRunTestCase (runTestCaseFunction, isNewerCucumberVersion = fa
681
681
  let isQuarantined = false
682
682
  let isModified = false
683
683
 
684
+ const originalDryRun = this.options.dryRun
684
685
  if (isTestManagementTestsEnabled) {
685
686
  const testProperties = getTestProperties(testSuitePath, pickle.name)
686
687
  isAttemptToFix = testProperties.attemptToFix
@@ -719,6 +720,9 @@ function getWrappedRunTestCase (runTestCaseFunction, isNewerCucumberVersion = fa
719
720
  // TODO: for >=11 we could use `runTestCaseResult` instead of accumulating results in `lastStatusByPickleId`
720
721
  let runTestCaseResult = await runTestCaseFunction.apply(this, arguments)
721
722
 
723
+ // Restore dryRun so it doesn't affect subsequent tests in the same worker
724
+ this.options.dryRun = originalDryRun
725
+
722
726
  const testStatuses = lastStatusByPickleId.get(pickle.id)
723
727
  const lastTestStatus = testStatuses.at(-1)
724
728
 
@@ -1053,6 +1057,12 @@ addHook({
1053
1057
  this.options.worldParameters._ddIsFlakyTestRetriesEnabled = isFlakyTestRetriesEnabled
1054
1058
  this.options.worldParameters._ddNumTestRetries = numTestRetries
1055
1059
 
1060
+ if (isTestManagementTestsEnabled) {
1061
+ this.options.worldParameters._ddIsTestManagementTestsEnabled = true
1062
+ this.options.worldParameters._ddTestManagementTests = testManagementTests
1063
+ this.options.worldParameters._ddTestManagementAttemptToFixRetries = testManagementAttemptToFixRetries
1064
+ }
1065
+
1056
1066
  return startWorker.apply(this, arguments)
1057
1067
  })
1058
1068
  return adapterPackage
@@ -1090,6 +1100,11 @@ addHook({
1090
1100
  }
1091
1101
  isFlakyTestRetriesEnabled = !!this.options.worldParameters._ddIsFlakyTestRetriesEnabled
1092
1102
  numTestRetries = this.options.worldParameters._ddNumTestRetries ?? 0
1103
+ isTestManagementTestsEnabled = !!this.options.worldParameters._ddIsTestManagementTestsEnabled
1104
+ if (isTestManagementTestsEnabled) {
1105
+ testManagementTests = this.options.worldParameters._ddTestManagementTests
1106
+ testManagementAttemptToFixRetries = this.options.worldParameters._ddTestManagementAttemptToFixRetries
1107
+ }
1093
1108
  }
1094
1109
  )
1095
1110
  return workerPackage
@@ -4,6 +4,7 @@ module.exports = {
4
4
  '@anthropic-ai/sdk': { esmFirst: true, fn: () => require('../anthropic') },
5
5
  '@apollo/server': () => require('../apollo-server'),
6
6
  '@apollo/gateway': () => require('../apollo'),
7
+ '@langchain/langgraph': { esmFirst: true, fn: () => require('../langgraph') },
7
8
  'apollo-server-core': () => require('../apollo-server-core'),
8
9
  '@aws-sdk/smithy-client': () => require('../aws-sdk'),
9
10
  '@azure/event-hubs': () => require('../azure-event-hubs'),
@@ -4,4 +4,5 @@ module.exports = [
4
4
  ...require('./ai'),
5
5
  ...require('./bullmq'),
6
6
  ...require('./langchain'),
7
+ ...require('./langgraph'),
7
8
  ]
@@ -0,0 +1,30 @@
1
+ 'use strict'
2
+
3
+ module.exports = [
4
+ {
5
+ module: {
6
+ name: '@langchain/langgraph',
7
+ versionRange: '>=1.1.2',
8
+ filePath: 'dist/pregel/index.js',
9
+ },
10
+ functionQuery: {
11
+ methodName: 'stream',
12
+ className: 'Pregel',
13
+ kind: 'AsyncIterator',
14
+ },
15
+ channelName: 'Pregel_stream',
16
+ },
17
+ {
18
+ module: {
19
+ name: '@langchain/langgraph',
20
+ versionRange: '>=1.1.2',
21
+ filePath: 'dist/pregel/index.cjs',
22
+ },
23
+ functionQuery: {
24
+ methodName: 'stream',
25
+ className: 'Pregel',
26
+ kind: 'AsyncIterator',
27
+ },
28
+ channelName: 'Pregel_stream',
29
+ },
30
+ ]
@@ -702,11 +702,15 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
702
702
  const mightHitBreakpoint = this.isDiEnabled && numTestExecutions >= 2
703
703
 
704
704
  const ctx = testContexts.get(event.test)
705
+ if (!ctx) {
706
+ log.warn('"ci:jest:test_done": no context found for test "%s"', testName)
707
+ return
708
+ }
705
709
 
706
710
  const finalStatus = this.getFinalStatus(testName,
707
711
  status,
708
- !!ctx?.isNew,
709
- !!ctx?.isModified,
712
+ !!ctx.isNew,
713
+ !!ctx.isModified,
710
714
  isEfdRetry,
711
715
  isAttemptToFix,
712
716
  numTestExecutions)
@@ -761,6 +765,9 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
761
765
  efdDeterminedRetries.clear()
762
766
  efdSlowAbortedTests.clear()
763
767
  efdNewTestCandidates.clear()
768
+ retriedTestsToNumAttempts.clear()
769
+ attemptToFixRetriedTestsStatuses.clear()
770
+ testsToBeRetried.clear()
764
771
  }
765
772
  if (event.name === 'test_skip' || event.name === 'test_todo') {
766
773
  const testName = getJestTestName(event.test, this.getShouldStripSeedFromTestName())
@@ -0,0 +1,7 @@
1
+ 'use strict'
2
+
3
+ const { addHook, getHooks } = require('./helpers/instrument')
4
+
5
+ for (const hook of getHooks('@langchain/langgraph')) {
6
+ addHook(hook, exports => exports)
7
+ }
@@ -101,12 +101,12 @@ function getFilteredSuites (originalSuites) {
101
101
  }, { suitesToRun: [], skippedSuites: new Set() })
102
102
  }
103
103
 
104
- function getOnStartHandler (isParallel, frameworkVersion) {
104
+ function getOnStartHandler (frameworkVersion) {
105
105
  return function () {
106
106
  const processArgv = process.argv.slice(2).join(' ')
107
107
  const command = `mocha ${processArgv}`
108
108
  testSessionStartCh.publish({ command, frameworkVersion })
109
- if (!isParallel && skippedSuites.length) {
109
+ if (skippedSuites.length) {
110
110
  itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })
111
111
  }
112
112
  }
@@ -315,8 +315,7 @@ function getExecutionConfiguration (runner, isParallel, frameworkVersion, onFini
315
315
  config.isTestManagementTestsEnabled = libraryConfig.isTestManagementEnabled
316
316
  config.testManagementAttemptToFixRetries = libraryConfig.testManagementAttemptToFixRetries
317
317
  config.isImpactedTestsEnabled = libraryConfig.isImpactedTestsEnabled
318
- // ITR is not supported in parallel mode yet
319
- config.isSuitesSkippingEnabled = !isParallel && libraryConfig.isSuitesSkippingEnabled
318
+ config.isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled
320
319
  config.isFlakyTestRetriesEnabled = libraryConfig.isFlakyTestRetriesEnabled
321
320
  config.flakyTestRetriesCount = libraryConfig.flakyTestRetriesCount
322
321
 
@@ -452,7 +451,7 @@ addHook({
452
451
 
453
452
  const { suitesByTestFile, numSuitesByTestFile } = getSuitesByTestFile(this.suite)
454
453
 
455
- this.once('start', getOnStartHandler(false, frameworkVersion))
454
+ this.once('start', getOnStartHandler(frameworkVersion))
456
455
 
457
456
  this.once('end', getOnEndHandler(false))
458
457
 
@@ -623,9 +622,16 @@ addHook({
623
622
  return run.apply(this, arguments)
624
623
  }
625
624
 
626
- this.once('start', getOnStartHandler(true, frameworkVersion))
625
+ this.once('start', getOnStartHandler(frameworkVersion))
627
626
  this.once('end', getOnEndHandler(true))
628
627
 
628
+ // Populate unskippable suites before config is fetched (matches serial mode at Mocha.prototype.run)
629
+ for (const filePath of files) {
630
+ if (isMarkedAsUnskippable({ path: filePath })) {
631
+ unskippableSuites.push(filePath)
632
+ }
633
+ }
634
+
629
635
  getExecutionConfiguration(this, true, frameworkVersion, () => {
630
636
  if (config.isKnownTestsEnabled) {
631
637
  const testSuites = files.map(file => getTestSuitePath(file, process.cwd()))
@@ -640,7 +646,25 @@ addHook({
640
646
  config.isEarlyFlakeDetectionFaulty = true
641
647
  }
642
648
  }
643
- run.apply(this, arguments)
649
+ if (config.isSuitesSkippingEnabled && suitesToSkip.length) {
650
+ const filteredFiles = []
651
+ const skippedFiles = []
652
+ for (const file of files) {
653
+ const testPath = getTestSuitePath(file, process.cwd())
654
+ const shouldSkip = suitesToSkip.includes(testPath)
655
+ const isUnskippable = unskippableSuites.includes(file)
656
+ if (shouldSkip && !isUnskippable) {
657
+ skippedFiles.push(testPath)
658
+ } else {
659
+ filteredFiles.push(file)
660
+ }
661
+ }
662
+ isSuitesSkipped = skippedFiles.length > 0
663
+ skippedSuites = skippedFiles
664
+ run.apply(this, [cb, { files: filteredFiles }])
665
+ } else {
666
+ run.apply(this, arguments)
667
+ }
644
668
  })
645
669
 
646
670
  return this
@@ -694,8 +718,7 @@ addHook({
694
718
  if (config.isTestManagementTestsEnabled) {
695
719
  const testSuiteTestManagementTests = config.testManagementTests?.mocha?.suites?.[testPath] || {}
696
720
  newWorkerArgs._ddIsTestManagementTestsEnabled = true
697
- // TODO: attempt to fix does not work in parallel mode yet
698
- // newWorkerArgs._ddTestManagementAttemptToFixRetries = config.testManagementAttemptToFixRetries
721
+ newWorkerArgs._ddTestManagementAttemptToFixRetries = config.testManagementAttemptToFixRetries
699
722
  newWorkerArgs._ddTestManagementTests = {
700
723
  mocha: {
701
724
  suites: {
@@ -140,7 +140,6 @@ function runnableWrapper (RunnablePackage, libraryConfig) {
140
140
  if (!testFinishCh.hasSubscribers) {
141
141
  return run.apply(this, arguments)
142
142
  }
143
- // Flaky test retries does not work in parallel mode
144
143
  if (libraryConfig?.isFlakyTestRetriesEnabled) {
145
144
  this.retries(libraryConfig?.flakyTestRetriesCount)
146
145
  }
@@ -43,10 +43,10 @@ addHook({
43
43
  }
44
44
  if (this.options._ddIsTestManagementTestsEnabled) {
45
45
  config.isTestManagementTestsEnabled = true
46
- // TODO: attempt to fix does not work in parallel mode yet
47
- // config.testManagementAttemptToFixRetries = this.options._ddTestManagementAttemptToFixRetries
46
+ config.testManagementAttemptToFixRetries = this.options._ddTestManagementAttemptToFixRetries
48
47
  config.testManagementTests = this.options._ddTestManagementTests
49
48
  delete this.options._ddIsTestManagementTestsEnabled
49
+ delete this.options._ddTestManagementAttemptToFixRetries
50
50
  delete this.options._ddTestManagementTests
51
51
  }
52
52
  if (this.options._ddIsFlakyTestRetriesEnabled) {
@@ -146,8 +146,23 @@ function isReporterPackageNewest (vitestPackage) {
146
146
  return vitestPackage.h?.name === 'BaseSequencer'
147
147
  }
148
148
 
149
- function isBaseSequencer (vitestPackage) {
150
- return vitestPackage.b?.name === 'BaseSequencer'
149
+ /**
150
+ * Finds an export by its `.name` property in a minified vitest chunk.
151
+ * Minified export keys change across versions, so we search by function/class name.
152
+ * @param {object} pkg - The module exports object
153
+ * @param {string} name - The `.name` value to look for
154
+ * @returns {{ key: string, value: Function } | undefined}
155
+ */
156
+ function findExportByName (pkg, name) {
157
+ for (const [key, value] of Object.entries(pkg)) {
158
+ if (value?.name === name) {
159
+ return { key, value }
160
+ }
161
+ }
162
+ }
163
+
164
+ function getBaseSequencerExport (vitestPackage) {
165
+ return findExportByName(vitestPackage, 'BaseSequencer')
151
166
  }
152
167
 
153
168
  function getChannelPromise (channelToPublishTo, frameworkVersion) {
@@ -157,19 +172,19 @@ function getChannelPromise (channelToPublishTo, frameworkVersion) {
157
172
  }
158
173
 
159
174
  function isCliApiPackage (vitestPackage) {
160
- return vitestPackage.s?.name === 'startVitest'
175
+ return !!findExportByName(vitestPackage, 'startVitest')
161
176
  }
162
177
 
163
- function isTestPackage (testPackage) {
164
- return testPackage.V?.name === 'VitestTestRunner'
178
+ function getTestRunnerExport (testPackage) {
179
+ return findExportByName(testPackage, 'VitestTestRunner') || findExportByName(testPackage, 'TestRunner')
165
180
  }
166
181
 
167
- function hasForksPoolWorker (vitestPackage) {
168
- return vitestPackage.f?.name === 'ForksPoolWorker'
182
+ function getForksPoolWorkerExport (vitestPackage) {
183
+ return findExportByName(vitestPackage, 'ForksPoolWorker')
169
184
  }
170
185
 
171
- function hasThreadsPoolWorker (vitestPackage) {
172
- return vitestPackage.T?.name === 'ThreadsPoolWorker'
186
+ function getThreadsPoolWorkerExport (vitestPackage) {
187
+ return findExportByName(vitestPackage, 'ThreadsPoolWorker')
173
188
  }
174
189
 
175
190
  function getSessionStatus (state) {
@@ -447,7 +462,11 @@ function getCliOrStartVitestWrapper (frameworkVersion) {
447
462
  }
448
463
 
449
464
  function getCreateCliWrapper (vitestPackage, frameworkVersion) {
450
- shimmer.wrap(vitestPackage, 'c', getCliOrStartVitestWrapper(frameworkVersion))
465
+ const createCliExport = findExportByName(vitestPackage, 'createCLI')
466
+ if (!createCliExport) {
467
+ return vitestPackage
468
+ }
469
+ shimmer.wrap(vitestPackage, createCliExport.key, getCliOrStartVitestWrapper(frameworkVersion))
451
470
 
452
471
  return vitestPackage
453
472
  }
@@ -534,27 +553,30 @@ function getStartVitestWrapper (cliApiPackage, frameworkVersion) {
534
553
  if (!isCliApiPackage(cliApiPackage)) {
535
554
  return cliApiPackage
536
555
  }
537
- shimmer.wrap(cliApiPackage, 's', getCliOrStartVitestWrapper(frameworkVersion))
556
+ const startVitestExport = findExportByName(cliApiPackage, 'startVitest')
557
+ shimmer.wrap(cliApiPackage, startVitestExport.key, getCliOrStartVitestWrapper(frameworkVersion))
538
558
 
539
- if (hasForksPoolWorker(cliApiPackage)) {
559
+ const forksPoolWorker = getForksPoolWorkerExport(cliApiPackage)
560
+ if (forksPoolWorker) {
540
561
  // function is async
541
- shimmer.wrap(cliApiPackage.f.prototype, 'start', start => function () {
562
+ shimmer.wrap(forksPoolWorker.value.prototype, 'start', start => function () {
542
563
  vitestPool = 'child_process'
543
564
  this.env.DD_VITEST_WORKER = '1'
544
565
 
545
566
  return start.apply(this, arguments)
546
567
  })
547
- shimmer.wrap(cliApiPackage.f.prototype, 'on', getWrappedOn)
568
+ shimmer.wrap(forksPoolWorker.value.prototype, 'on', getWrappedOn)
548
569
  }
549
570
 
550
- if (hasThreadsPoolWorker(cliApiPackage)) {
571
+ const threadsPoolWorker = getThreadsPoolWorkerExport(cliApiPackage)
572
+ if (threadsPoolWorker) {
551
573
  // function is async
552
- shimmer.wrap(cliApiPackage.T.prototype, 'start', start => function () {
574
+ shimmer.wrap(threadsPoolWorker.value.prototype, 'start', start => function () {
553
575
  vitestPool = 'worker_threads'
554
576
  this.env.DD_VITEST_WORKER = '1'
555
577
  return start.apply(this, arguments)
556
578
  })
557
- shimmer.wrap(cliApiPackage.T.prototype, 'on', getWrappedOn)
579
+ shimmer.wrap(threadsPoolWorker.value.prototype, 'on', getWrappedOn)
558
580
  }
559
581
  return cliApiPackage
560
582
  }
@@ -747,7 +769,10 @@ function wrapVitestTestRunner (VitestTestRunner) {
747
769
  }
748
770
 
749
771
  const lastExecutionStatus = task.result.state
750
- const shouldFlipStatus = isEarlyFlakeDetectionEnabled || attemptToFixTasks.has(task)
772
+ const isAtf = attemptToFixTasks.has(task)
773
+ const isQuarantinedOrDisabledAtf = isAtf && (quarantinedTasks.has(task) || disabledTasks.has(task))
774
+ const shouldTrackStatuses = isEarlyFlakeDetectionEnabled || isAtf
775
+ const shouldFlipStatus = isEarlyFlakeDetectionEnabled || isQuarantinedOrDisabledAtf
751
776
  const statuses = taskToStatuses.get(task)
752
777
 
753
778
  // These clauses handle task.repeats, whether EFD is enabled or not
@@ -765,8 +790,10 @@ function wrapVitestTestRunner (VitestTestRunner) {
765
790
  } else {
766
791
  testPassCh.publish({ task, ...ctx.currentStore })
767
792
  }
768
- if (shouldFlipStatus) {
793
+ if (shouldTrackStatuses) {
769
794
  statuses.push(lastExecutionStatus)
795
+ }
796
+ if (shouldFlipStatus) {
770
797
  // If we don't "reset" the result.state to "pass", once a repetition fails,
771
798
  // vitest will always consider the test as failed, so we can't read the actual status
772
799
  // This means that we change vitest's behavior:
@@ -776,7 +803,7 @@ function wrapVitestTestRunner (VitestTestRunner) {
776
803
  }
777
804
  }
778
805
  } else if (numRepetition === task.repeats) {
779
- if (shouldFlipStatus) {
806
+ if (shouldTrackStatuses) {
780
807
  statuses.push(lastExecutionStatus)
781
808
  }
782
809
 
@@ -864,11 +891,12 @@ addHook({
864
891
  versions: ['>=4.0.0'],
865
892
  filePattern: 'dist/chunks/test.*',
866
893
  }, (testPackage) => {
867
- if (!isTestPackage(testPackage)) {
894
+ const testRunner = getTestRunnerExport(testPackage)
895
+ if (!testRunner) {
868
896
  return testPackage
869
897
  }
870
898
 
871
- wrapVitestTestRunner(testPackage.V)
899
+ wrapVitestTestRunner(testRunner.value)
872
900
 
873
901
  return testPackage
874
902
  })
@@ -937,8 +965,9 @@ addHook({
937
965
  versions: ['>=3.0.9'],
938
966
  filePattern: 'dist/chunks/coverage.*',
939
967
  }, (coveragePackage) => {
940
- if (isBaseSequencer(coveragePackage)) {
941
- shimmer.wrap(coveragePackage.b.prototype, 'sort', getSortWrapper)
968
+ const baseSequencer = getBaseSequencerExport(coveragePackage)
969
+ if (baseSequencer) {
970
+ shimmer.wrap(baseSequencer.value.prototype, 'sort', getSortWrapper)
942
971
  }
943
972
  return coveragePackage
944
973
  })
@@ -607,7 +607,7 @@ class CypressPlugin {
607
607
  [TEST_SESSION_NAME]: testSessionName,
608
608
  }
609
609
  }
610
- const libraryCapabilitiesTags = getLibraryCapabilitiesTags(this.constructor.id, false, this.frameworkVersion)
610
+ const libraryCapabilitiesTags = getLibraryCapabilitiesTags(this.constructor.id, this.frameworkVersion)
611
611
  metadataTags.test = {
612
612
  ...metadataTags.test,
613
613
  ...libraryCapabilitiesTags,
@@ -61,18 +61,16 @@ Cypress.on('fail', (err, runnable) => {
61
61
  }
62
62
 
63
63
  const testName = runnable.fullTitle()
64
- const { isQuarantined, isAttemptToFix } = getTestProperties(testName)
64
+ const { isQuarantined, isDisabled } = getTestProperties(testName)
65
65
 
66
- // For pure quarantined tests (not attemptToFix), suppress the failure
67
- // This makes the test "pass" from Cypress's perspective while we still track the error
68
- if (isQuarantined && !isAttemptToFix) {
69
- // Store the error so we can report it to Datadog in afterEach
66
+ // Suppress failures for quarantined or disabled tests so they don't affect the exit code.
67
+ // This applies regardless of attempt-to-fix status: per spec, quarantined/disabled test
68
+ // results are always ignored.
69
+ if (isQuarantined || isDisabled) {
70
70
  quarantinedTestErrors.set(testName, err)
71
- // Don't re-throw - this prevents Cypress from marking the test as failed
72
71
  return
73
72
  }
74
73
 
75
- // For all other tests (including attemptToFix), let the error propagate normally
76
74
  throw err
77
75
  })
78
76
 
@@ -0,0 +1,24 @@
1
+ 'use strict'
2
+
3
+ const CompositePlugin = require('../../dd-trace/src/plugins/composite')
4
+ const langgraphLLMObsPlugins = require('../../dd-trace/src/llmobs/plugins/langgraph')
5
+ const streamPlugin = require('./stream')
6
+
7
+ const plugins = {}
8
+
9
+ // CRITICAL: LLMObs plugins MUST come first
10
+ for (const Plugin of langgraphLLMObsPlugins) {
11
+ plugins[Plugin.id] = Plugin
12
+ }
13
+
14
+ // Tracing plugins second
15
+ for (const Plugin of streamPlugin) {
16
+ plugins[Plugin.id] = Plugin
17
+ }
18
+
19
+ class LanggraphPlugin extends CompositePlugin {
20
+ static id = 'langgraph'
21
+ static plugins = plugins
22
+ }
23
+
24
+ module.exports = LanggraphPlugin
@@ -0,0 +1,41 @@
1
+ 'use strict'
2
+
3
+ const TracingPlugin = require('../../dd-trace/src/plugins/tracing')
4
+ const { spanHasError } = require('../../dd-trace/src/llmobs/util')
5
+
6
+ // We are only tracing Pregel.stream because Pregel.invoke calls stream internally resulting in
7
+ // a graph with spans that look redundant.
8
+ class PregelStreamPlugin extends TracingPlugin {
9
+ static id = 'langgraph_pregel_stream'
10
+ static prefix = 'tracing:orchestrion:@langchain/langgraph:Pregel_stream'
11
+
12
+ bindStart (ctx) {
13
+ this.startSpan('LangGraph', {
14
+ service: this.config.service,
15
+ kind: 'internal',
16
+ component: 'langgraph',
17
+ }, ctx)
18
+ return ctx.currentStore
19
+ }
20
+ }
21
+ class NextStreamPlugin extends TracingPlugin {
22
+ static id = 'langgraph_stream_next'
23
+ static prefix = 'tracing:orchestrion:@langchain/langgraph:Pregel_stream_next'
24
+
25
+ bindStart (ctx) {
26
+ return ctx.currentStore
27
+ }
28
+
29
+ asyncEnd (ctx) {
30
+ const span = ctx.currentStore?.span
31
+ if (!span) return
32
+ if (ctx.result.done === true || spanHasError(span)) {
33
+ span.finish()
34
+ }
35
+ }
36
+ }
37
+
38
+ module.exports = [
39
+ PregelStreamPlugin,
40
+ NextStreamPlugin,
41
+ ]
@@ -106,6 +106,7 @@ const defaultsWithoutSupportedConfigurationEntry = {
106
106
  isGCPFunction: false,
107
107
  instrumentationSource: 'manual',
108
108
  isServiceUserProvided: false,
109
+ isServiceNameInferred: true,
109
110
  lookup: undefined,
110
111
  plugins: true,
111
112
  }
@@ -722,8 +722,10 @@ class Config {
722
722
  // Priority:
723
723
  // DD_SERVICE > tags.service > OTEL_SERVICE_NAME > NX_TASK_TARGET_PROJECT (if DD_ENABLE_NX_SERVICE_NAME) > default
724
724
  let serviceName = DD_SERVICE || tags.service || OTEL_SERVICE_NAME
725
+ let isServiceNameInferred
725
726
  if (!serviceName && NX_TASK_TARGET_PROJECT) {
726
727
  if (isTrue(DD_ENABLE_NX_SERVICE_NAME)) {
728
+ isServiceNameInferred = true
727
729
  serviceName = NX_TASK_TARGET_PROJECT
728
730
  } else if (DD_MAJOR < 6) {
729
731
  // Warn about v6 behavior change for Nx projects
@@ -734,6 +736,7 @@ class Config {
734
736
  }
735
737
  }
736
738
  setString(target, 'service', serviceName)
739
+ if (serviceName) setBoolean(target, 'isServiceNameInferred', isServiceNameInferred ?? false)
737
740
  if (DD_SERVICE_MAPPING) {
738
741
  target.serviceMapping = Object.fromEntries(
739
742
  DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
@@ -1004,7 +1007,11 @@ class Config {
1004
1007
  setUnit(opts, 'sampleRate', options.sampleRate ?? options.ingestion.sampleRate)
1005
1008
  opts['sampler.rateLimit'] = maybeInt(options.rateLimit ?? options.ingestion.rateLimit)
1006
1009
  setSamplingRule(opts, 'sampler.rules', options.samplingRules)
1007
- setString(opts, 'service', options.service || tags.service)
1010
+ const optService = options.service || tags.service
1011
+ setString(opts, 'service', optService)
1012
+ if (optService) {
1013
+ setBoolean(opts, 'isServiceNameInferred', false)
1014
+ }
1008
1015
  opts.serviceMapping = options.serviceMapping
1009
1016
  setString(opts, 'site', options.site)
1010
1017
  if (options.spanAttributeSchema) {
@@ -2995,6 +2995,13 @@
2995
2995
  "default": "true"
2996
2996
  }
2997
2997
  ],
2998
+ "DD_TRACE_LANGGRAPH_ENABLED": [
2999
+ {
3000
+ "implementation": "C",
3001
+ "type": "boolean",
3002
+ "default": "true"
3003
+ }
3004
+ ],
2998
3005
  "DD_TRACE_LDAPJS_ENABLED": [
2999
3006
  {
3000
3007
  "implementation": "A",
@@ -23,6 +23,7 @@ module.exports = {
23
23
  SPAN_SAMPLING_MAX_PER_SECOND: '_dd.span_sampling.max_per_second',
24
24
  DATADOG_LAMBDA_EXTENSION_PATH: '/opt/extensions/datadog-agent',
25
25
  DECISION_MAKER_KEY: '_dd.p.dm',
26
+ SAMPLING_KNUTH_RATE: '_dd.p.ksr',
26
27
  PROCESS_ID: 'process_id',
27
28
  ERROR_TYPE: 'error.type',
28
29
  ERROR_MESSAGE: 'error.message',
@@ -74,7 +74,7 @@ class Crashtracker {
74
74
  timeout_ms: 3000,
75
75
  },
76
76
  timeout: { secs: 5, nanos: 0 },
77
- demangle_names: false,
77
+ demangle_names: true,
78
78
  signals: [],
79
79
  resolve_frames: resolveMode,
80
80
  }
@@ -2,8 +2,11 @@
2
2
 
3
3
  const { workerData: { config: parentConfig, parentThreadId, configPort } } = require('node:worker_threads')
4
4
  const { getAgentUrl } = require('../../agent/url')
5
+ const processTags = require('../../process-tags')
5
6
  const log = require('./log')
6
7
 
8
+ processTags.initialize()
9
+
7
10
  const config = module.exports = {
8
11
  ...parentConfig,
9
12
  parentThreadId,
@@ -26,6 +26,7 @@ class DogStatsDClient {
26
26
  constructor (options = {}) {
27
27
  if (options.metricsProxyUrl) {
28
28
  this._httpOptions = {
29
+ method: 'POST',
29
30
  url: options.metricsProxyUrl.toString(),
30
31
  path: '/dogstatsd/v2/proxy',
31
32
  }
@@ -15,6 +15,17 @@ class LangChainLLMObsChainHandler extends LangChainLLMObsHandler {
15
15
  // chain spans will always be workflows
16
16
  this._tagger.tagTextIO(span, input, output)
17
17
  }
18
+
19
+ getName ({ span, instance }) {
20
+ const firstCallable = instance?.first
21
+
22
+ if (firstCallable?.constructor?.name === 'ChannelWrite') return
23
+
24
+ const firstCallableIsLangGraph = firstCallable?.lc_namespace?.includes('langgraph')
25
+ const firstCallableName = firstCallable?.name
26
+
27
+ return firstCallableIsLangGraph ? firstCallableName : super.getName({ span })
28
+ }
18
29
  }
19
30
 
20
31
  module.exports = LangChainLLMObsChainHandler
@@ -54,6 +54,8 @@ class BaseLangChainLLMObsPlugin extends LLMObsPlugin {
54
54
  const handler = this._handlers[ctx.type]
55
55
  const name = handler?.getName({ span, instance })
56
56
 
57
+ if (name == null) return
58
+
57
59
  return {
58
60
  modelProvider,
59
61
  modelName,
@@ -0,0 +1,114 @@
1
+ 'use strict'
2
+
3
+ const LLMObsPlugin = require('../base')
4
+ const { spanHasError } = require('../../util')
5
+
6
+ const streamDataMap = new WeakMap()
7
+
8
+ function formatIO (data) {
9
+ if (data == null) return ''
10
+
11
+ if (typeof data === 'string' || typeof data === 'number' || typeof data === 'boolean') {
12
+ return data
13
+ }
14
+
15
+ if (data.constructor?.name === 'Object') {
16
+ const formatted = {}
17
+ for (const [key, value] of Object.entries(data)) {
18
+ formatted[key] = formatIO(value)
19
+ }
20
+ return formatted
21
+ }
22
+
23
+ if (Array.isArray(data)) {
24
+ return data.map(item => formatIO(item))
25
+ }
26
+
27
+ try {
28
+ return JSON.stringify(data)
29
+ } catch {
30
+ return String(data)
31
+ }
32
+ }
33
+
34
+ class PregelStreamLLMObsPlugin extends LLMObsPlugin {
35
+ static id = 'llmobs_langgraph_pregel_stream'
36
+ static integration = 'langgraph'
37
+ static prefix = 'tracing:orchestrion:@langchain/langgraph:Pregel_stream'
38
+
39
+ getLLMObsSpanRegisterOptions (ctx) {
40
+ const name = ctx.self.name || 'LangGraph'
41
+
42
+ const enabled = this._tracerConfig.llmobs.enabled
43
+ if (!enabled) return
44
+
45
+ const span = ctx.currentStore?.span
46
+ if (!span) return
47
+ streamDataMap.set(span, {
48
+ streamInputs: ctx.arguments?.[0],
49
+ chunks: [],
50
+ })
51
+
52
+ return {
53
+ kind: 'workflow',
54
+ name,
55
+ }
56
+ }
57
+
58
+ asyncEnd () {}
59
+ }
60
+
61
+ class NextStreamLLMObsPlugin extends LLMObsPlugin {
62
+ static id = 'llmobs_langgraph_next_stream'
63
+ static prefix = 'tracing:orchestrion:@langchain/langgraph:Pregel_stream_next'
64
+
65
+ start () {} // no-op: span was already registered by PregelStreamLLMObsPlugin
66
+
67
+ end () {} // no-op: context restore is handled by PregelStreamLLMObsPlugin
68
+
69
+ error (ctx) {
70
+ const span = ctx.currentStore?.span
71
+ if (!span) return
72
+
73
+ this.#tagAndCleanup(span, true)
74
+ }
75
+
76
+ setLLMObsTags (ctx) {
77
+ const span = ctx.currentStore?.span
78
+ if (!span) return
79
+
80
+ // Accumulate chunks until done
81
+ if (ctx.result?.value && !ctx.result.done) {
82
+ const streamData = streamDataMap.get(span)
83
+ if (streamData) {
84
+ streamData.chunks.push(ctx.result.value)
85
+ }
86
+ return
87
+ }
88
+
89
+ // Tag on last chunk
90
+ if (ctx.result?.done) {
91
+ const hasError = ctx.error || spanHasError(span)
92
+ this.#tagAndCleanup(span, hasError)
93
+ }
94
+ }
95
+
96
+ #tagAndCleanup (span, hasError) {
97
+ const streamData = streamDataMap.get(span)
98
+ if (!streamData) return
99
+
100
+ const { streamInputs: inputs, chunks } = streamData
101
+ const input = inputs == null ? undefined : formatIO(inputs)
102
+ const lastChunk = chunks.length > 0 ? chunks[chunks.length - 1] : undefined
103
+ const output = !hasError && lastChunk != null ? formatIO(lastChunk) : undefined
104
+
105
+ this._tagger.tagTextIO(span, input, output)
106
+
107
+ streamDataMap.delete(span)
108
+ }
109
+ }
110
+
111
+ module.exports = [
112
+ PregelStreamLLMObsPlugin,
113
+ NextStreamLLMObsPlugin,
114
+ ]
@@ -125,7 +125,7 @@ module.exports = class CiPlugin extends Plugin {
125
125
  this._pendingRequestErrorTags = []
126
126
 
127
127
  this.addSub(`ci:${this.constructor.id}:library-configuration`, (ctx) => {
128
- const { onDone, isParallel, frameworkVersion } = ctx
128
+ const { onDone, frameworkVersion } = ctx
129
129
  ctx.currentStore = storage('legacy').getStore()
130
130
 
131
131
  if (!this.tracer._exporter || !this.tracer._exporter.getLibraryConfiguration) {
@@ -143,7 +143,7 @@ module.exports = class CiPlugin extends Plugin {
143
143
  ? getSessionRequestErrorTags(this.testSessionSpan)
144
144
  : Object.fromEntries(this._pendingRequestErrorTags.map(({ tag, value }) => [tag, value]))
145
145
 
146
- const libraryCapabilitiesTags = getLibraryCapabilitiesTags(this.constructor.id, isParallel, frameworkVersion)
146
+ const libraryCapabilitiesTags = getLibraryCapabilitiesTags(this.constructor.id, frameworkVersion)
147
147
  const metadataTags = {
148
148
  test: {
149
149
  ...libraryCapabilitiesTags,
@@ -31,6 +31,7 @@ const plugins = {
31
31
  get '@redis/client' () { return require('../../../datadog-plugin-redis/src') },
32
32
  get '@smithy/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
33
33
  get '@vitest/runner' () { return require('../../../datadog-plugin-vitest/src') },
34
+ get '@langchain/langgraph' () { return require('../../../datadog-plugin-langgraph/src') },
34
35
  get aerospike () { return require('../../../datadog-plugin-aerospike/src') },
35
36
  get ai () { return require('../../../datadog-plugin-ai/src') },
36
37
  get amqp10 () { return require('../../../datadog-plugin-amqp10/src') },
@@ -150,7 +150,6 @@ const DD_CAPABILITIES_FAILED_TEST_REPLAY = '_dd.library_capabilities.failed_test
150
150
  const DD_CI_LIBRARY_CONFIGURATION_ERROR = '_dd.ci.library_configuration_error'
151
151
 
152
152
  const UNSUPPORTED_TIA_FRAMEWORKS = new Set(['playwright', 'vitest'])
153
- const UNSUPPORTED_TIA_FRAMEWORKS_PARALLEL_MODE = new Set(['cucumber', 'mocha'])
154
153
  const MINIMUM_FRAMEWORK_VERSION_FOR_EFD = {
155
154
  playwright: '>=1.38.0',
156
155
  }
@@ -170,7 +169,6 @@ const MINIMUM_FRAMEWORK_VERSION_FOR_FAILED_TEST_REPLAY = {
170
169
  playwright: '>=1.38.0',
171
170
  }
172
171
 
173
- const UNSUPPORTED_ATTEMPT_TO_FIX_FRAMEWORKS_PARALLEL_MODE = new Set(['mocha'])
174
172
  const NOT_SUPPORTED_GRANULARITY_IMPACTED_TESTS_FRAMEWORKS = new Set(['mocha', 'playwright', 'vitest'])
175
173
 
176
174
  const TEST_LEVEL_EVENT_TYPES = [
@@ -987,9 +985,8 @@ function getFormattedError (error, repositoryRoot) {
987
985
  return newError
988
986
  }
989
987
 
990
- function isTiaSupported (testFramework, isParallel) {
991
- return !(UNSUPPORTED_TIA_FRAMEWORKS.has(testFramework) ||
992
- (isParallel && UNSUPPORTED_TIA_FRAMEWORKS_PARALLEL_MODE.has(testFramework)))
988
+ function isTiaSupported (testFramework) {
989
+ return !UNSUPPORTED_TIA_FRAMEWORKS.has(testFramework)
993
990
  }
994
991
 
995
992
  function isEarlyFlakeDetectionSupported (testFramework, frameworkVersion) {
@@ -1016,12 +1013,12 @@ function isDisableSupported (testFramework, frameworkVersion) {
1016
1013
  : true
1017
1014
  }
1018
1015
 
1019
- function isAttemptToFixSupported (testFramework, isParallel, frameworkVersion) {
1016
+ function isAttemptToFixSupported (testFramework, frameworkVersion) {
1020
1017
  if (testFramework === 'playwright') {
1021
1018
  return satisfies(frameworkVersion, MINIMUM_FRAMEWORK_VERSION_FOR_ATTEMPT_TO_FIX[testFramework])
1022
1019
  }
1023
1020
 
1024
- return !(isParallel && UNSUPPORTED_ATTEMPT_TO_FIX_FRAMEWORKS_PARALLEL_MODE.has(testFramework))
1021
+ return true
1025
1022
  }
1026
1023
 
1027
1024
  function isFailedTestReplaySupported (testFramework, frameworkVersion) {
@@ -1030,9 +1027,9 @@ function isFailedTestReplaySupported (testFramework, frameworkVersion) {
1030
1027
  : true
1031
1028
  }
1032
1029
 
1033
- function getLibraryCapabilitiesTags (testFramework, isParallel, frameworkVersion) {
1030
+ function getLibraryCapabilitiesTags (testFramework, frameworkVersion) {
1034
1031
  return {
1035
- [DD_CAPABILITIES_TEST_IMPACT_ANALYSIS]: isTiaSupported(testFramework, isParallel)
1032
+ [DD_CAPABILITIES_TEST_IMPACT_ANALYSIS]: isTiaSupported(testFramework)
1036
1033
  ? '1'
1037
1034
  : undefined,
1038
1035
  [DD_CAPABILITIES_EARLY_FLAKE_DETECTION]: isEarlyFlakeDetectionSupported(testFramework, frameworkVersion)
@@ -1049,7 +1046,7 @@ function getLibraryCapabilitiesTags (testFramework, isParallel, frameworkVersion
1049
1046
  ? '1'
1050
1047
  : undefined,
1051
1048
  [DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX]:
1052
- isAttemptToFixSupported(testFramework, isParallel, frameworkVersion)
1049
+ isAttemptToFixSupported(testFramework, frameworkVersion)
1053
1050
  ? '5'
1054
1051
  : undefined,
1055
1052
  [DD_CAPABILITIES_FAILED_TEST_REPLAY]: isFailedTestReplaySupported(testFramework, frameworkVersion)
@@ -31,10 +31,21 @@ const {
31
31
  SAMPLING_LIMIT_DECISION,
32
32
  SAMPLING_AGENT_DECISION,
33
33
  DECISION_MAKER_KEY,
34
+ SAMPLING_KNUTH_RATE,
34
35
  } = require('./constants')
35
36
 
36
37
  const DEFAULT_KEY = 'service:,env:'
37
38
 
39
+ /**
40
+ * Formats a sampling rate as a string with up to 6 significant digits and no trailing zeros.
41
+ *
42
+ * @param {number} rate
43
+ * @returns {string}
44
+ */
45
+ function formatKnuthRate (rate) {
46
+ return Number(rate.toPrecision(6)).toString()
47
+ }
48
+
38
49
  const defaultSampler = new Sampler(AUTO_KEEP)
39
50
 
40
51
  /**
@@ -254,6 +265,7 @@ class PrioritySampler {
254
265
  */
255
266
  #getPriorityByRule (context, rule) {
256
267
  context._trace[SAMPLING_RULE_DECISION] = rule.sampleRate
268
+ context._trace.tags[SAMPLING_KNUTH_RATE] = formatKnuthRate(rule.sampleRate)
257
269
  context._sampling.mechanism = SAMPLING_MECHANISM_RULE
258
270
  if (rule.provenance === 'customer') context._sampling.mechanism = SAMPLING_MECHANISM_REMOTE_USER
259
271
  if (rule.provenance === 'dynamic') context._sampling.mechanism = SAMPLING_MECHANISM_REMOTE_DYNAMIC
@@ -290,9 +302,15 @@ class PrioritySampler {
290
302
  // TODO: Change underscored properties to private ones.
291
303
  const sampler = this._samplers[key] || this._samplers[DEFAULT_KEY]
292
304
 
293
- context._trace[SAMPLING_AGENT_DECISION] = sampler.rate()
305
+ const rate = sampler.rate()
306
+ context._trace[SAMPLING_AGENT_DECISION] = rate
294
307
 
295
- context._sampling.mechanism = sampler === defaultSampler ? SAMPLING_MECHANISM_DEFAULT : SAMPLING_MECHANISM_AGENT
308
+ if (sampler === defaultSampler) {
309
+ context._sampling.mechanism = SAMPLING_MECHANISM_DEFAULT
310
+ } else {
311
+ context._trace.tags[SAMPLING_KNUTH_RATE] = formatKnuthRate(rate)
312
+ context._sampling.mechanism = SAMPLING_MECHANISM_AGENT
313
+ }
296
314
 
297
315
  return sampler.isSampled(context) ? AUTO_KEEP : AUTO_REJECT
298
316
  }
@@ -12,8 +12,19 @@ const ENTRYPOINT_PATH = require.main?.filename || ''
12
12
  // entrypoint.basedir = baz
13
13
  // package.json.name = <from package.json>
14
14
 
15
- // process tags are constant throughout the lifetime of a process
16
- function getProcessTags () {
15
+ /**
16
+ * Sanitize a process tag value
17
+ *
18
+ * @param {string} value
19
+ * @returns {string}
20
+ */
21
+ function sanitize (value) {
22
+ return String(value)
23
+ .toLowerCase()
24
+ .replaceAll(/[^a-zA-Z0-9/_.-]+/g, '_')
25
+ }
26
+
27
+ function buildProcessTags (config) {
17
28
  // Lazy load pkg to avoid issues with require.main during test initialization
18
29
  const pkg = require('../pkg')
19
30
 
@@ -35,6 +46,13 @@ function getProcessTags () {
35
46
  ['package.json.name', pkg.name || undefined],
36
47
  ]
37
48
 
49
+ // If config dependent tags keep growing, we should consider moving this into a function
50
+ if (config?.isServiceNameInferred) {
51
+ tags.push(['svc.auto', config.service])
52
+ } else if (config) {
53
+ tags.push(['svc.user', true])
54
+ }
55
+
38
56
  const tagsArray = []
39
57
  const tagsObject = {}
40
58
 
@@ -46,38 +64,27 @@ function getProcessTags () {
46
64
  }
47
65
  }
48
66
 
49
- const serialized = tagsArray.join(',')
50
-
51
- return {
52
- tags,
53
- serialized,
54
- tagsObject,
55
- tagsArray,
56
- }
67
+ processTags.tags = tags
68
+ processTags.serialized = tagsArray.join(',')
69
+ processTags.tagsObject = tagsObject
70
+ processTags.tagsArray = tagsArray
57
71
  }
58
72
 
59
- // Export the singleton
60
- module.exports = getProcessTags()
61
-
62
- module.exports.TRACING_FIELD_NAME = '_dd.tags.process'
63
- module.exports.DSM_FIELD_NAME = 'ProcessTags'
64
- module.exports.PROFILING_FIELD_NAME = 'process_tags'
65
- module.exports.DYNAMIC_INSTRUMENTATION_FIELD_NAME = 'process_tags'
66
- module.exports.TELEMETRY_FIELD_NAME = 'process_tags'
67
- module.exports.REMOTE_CONFIG_FIELD_NAME = 'process_tags'
68
- module.exports.CRASH_TRACKING_FIELD_NAME = 'process_tags'
69
- module.exports.CLIENT_TRACE_STATISTICS_FIELD_NAME = 'ProcessTags'
70
-
71
- /**
72
- * Sanitize a process tag value
73
- *
74
- * @param {string} value
75
- * @returns {string}
76
- */
77
- function sanitize (value) {
78
- return String(value)
79
- .toLowerCase()
80
- .replaceAll(/[^a-zA-Z0-9/_.-]+/g, '_')
73
+ // Singleton with constant defaults so pre-init reads don't blow up
74
+ const processTags = module.exports = {
75
+ initialize (config) {
76
+ // check if one of the properties added during build exist and if so return
77
+ if (processTags.tags) return
78
+ buildProcessTags(config)
79
+ },
80
+
81
+ TRACING_FIELD_NAME: '_dd.tags.process',
82
+ DSM_FIELD_NAME: 'ProcessTags',
83
+ PROFILING_FIELD_NAME: 'process_tags',
84
+ DYNAMIC_INSTRUMENTATION_FIELD_NAME: 'process_tags',
85
+ TELEMETRY_FIELD_NAME: 'process_tags',
86
+ REMOTE_CONFIG_FIELD_NAME: 'process_tags',
87
+ CRASH_TRACKING_FIELD_NAME: 'process_tags',
88
+ CLIENT_TRACE_STATISTICS_FIELD_NAME: 'ProcessTags',
89
+ sanitize,
81
90
  }
82
-
83
- module.exports.sanitize = sanitize
@@ -290,7 +290,15 @@ class NativeWallProfiler {
290
290
  }
291
291
 
292
292
  profilingContext = { spanId, rootSpanId, webTags }
293
- span[ProfilingContext] = profilingContext
293
+ // Don't cache if endpoint collection is enabled and webTags is undefined but
294
+ // the span's type hasn't been set yet. TracingPlugin.startSpan() calls
295
+ // enterWith() before the plugin sets span.type='web' via addRequestTags(),
296
+ // so the first enterCh event fires before the type is known. Without this
297
+ // guard we'd cache webTags=undefined and then serve that stale value on the
298
+ // subsequent activation (when span.type='web' is already set).
299
+ if (!this.#endpointCollectionEnabled || webTags !== undefined || context._tags['span.type']) {
300
+ span[ProfilingContext] = profilingContext
301
+ }
294
302
  }
295
303
  return profilingContext
296
304
  }
@@ -13,6 +13,7 @@ const nomenclature = require('./service-naming')
13
13
  const PluginManager = require('./plugin_manager')
14
14
  const NoopDogStatsDClient = require('./noop/dogstatsd')
15
15
  const { IS_SERVERLESS } = require('./serverless')
16
+ const processTags = require('./process-tags')
16
17
  const {
17
18
  setBaggageItem,
18
19
  getBaggageItem,
@@ -102,6 +103,9 @@ class Tracer extends NoopProxy {
102
103
  try {
103
104
  const config = getConfig(options) // TODO: support dynamic code config
104
105
 
106
+ // Add config dependent process tags
107
+ processTags.initialize(config)
108
+
105
109
  // Configure propagation hash manager for process tags + container tags
106
110
  const propagationHash = require('./propagation-hash')
107
111
  propagationHash.configure(config)