dd-trace 5.19.0 → 5.20.0

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/ext/formats.d.ts +1 -0
  2. package/ext/formats.js +2 -1
  3. package/init.js +3 -15
  4. package/package.json +3 -2
  5. package/packages/datadog-instrumentations/src/helpers/register.js +13 -11
  6. package/packages/datadog-instrumentations/src/http/client.js +7 -1
  7. package/packages/datadog-instrumentations/src/http/server.js +50 -13
  8. package/packages/datadog-instrumentations/src/mocha/main.js +21 -8
  9. package/packages/datadog-instrumentations/src/process.js +29 -0
  10. package/packages/datadog-instrumentations/src/vitest.js +47 -23
  11. package/packages/datadog-plugin-aws-sdk/src/base.js +15 -1
  12. package/packages/datadog-plugin-aws-sdk/src/services/kinesis.js +1 -1
  13. package/packages/datadog-plugin-aws-sdk/src/services/sns.js +1 -1
  14. package/packages/datadog-plugin-aws-sdk/src/services/sqs.js +3 -3
  15. package/packages/datadog-plugin-cypress/src/cypress-plugin.js +37 -8
  16. package/packages/datadog-plugin-vitest/src/index.js +2 -1
  17. package/packages/dd-trace/src/appsec/blocking.js +10 -1
  18. package/packages/dd-trace/src/appsec/channels.js +4 -1
  19. package/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js +1 -0
  20. package/packages/dd-trace/src/appsec/iast/analyzers/code-injection-analyzer.js +16 -0
  21. package/packages/dd-trace/src/appsec/iast/analyzers/weak-hash-analyzer.js +2 -0
  22. package/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js +2 -1
  23. package/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js +11 -0
  24. package/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/code-injection-sensitive-analyzer.js +25 -0
  25. package/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js +2 -0
  26. package/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js +2 -2
  27. package/packages/dd-trace/src/appsec/iast/vulnerabilities.js +1 -0
  28. package/packages/dd-trace/src/appsec/index.js +12 -7
  29. package/packages/dd-trace/src/appsec/rasp.js +121 -7
  30. package/packages/dd-trace/src/appsec/recommended.json +220 -2
  31. package/packages/dd-trace/src/config.js +41 -42
  32. package/packages/dd-trace/src/data_streams.js +44 -0
  33. package/packages/dd-trace/src/datastreams/pathway.js +4 -2
  34. package/packages/dd-trace/src/log/index.js +32 -0
  35. package/packages/dd-trace/src/opentracing/propagation/text_map_dsm.js +43 -0
  36. package/packages/dd-trace/src/opentracing/tracer.js +10 -6
  37. package/packages/dd-trace/src/plugins/ci_plugin.js +9 -2
  38. package/packages/dd-trace/src/plugins/plugin.js +12 -1
  39. package/packages/dd-trace/src/proxy.js +1 -0
  40. package/packages/dd-trace/src/tracer.js +2 -0
package/ext/formats.d.ts CHANGED
@@ -5,6 +5,7 @@ declare const formats: {
   HTTP_HEADERS: typeof opentracing.FORMAT_HTTP_HEADERS
   BINARY: typeof opentracing.FORMAT_BINARY
   LOG: 'log'
+  TEXT_MAP_DSM: 'text_map_dsm'
 }
 
 export = formats
package/ext/formats.js CHANGED
@@ -4,5 +4,6 @@ module.exports = {
   TEXT_MAP: 'text_map',
   HTTP_HEADERS: 'http_headers',
   BINARY: 'binary',
-  LOG: 'log'
+  LOG: 'log',
+  TEXT_MAP_DSM: 'text_map_dsm'
 }
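Note: the new TEXT_MAP_DSM key is exported next to the existing format constants. A minimal sketch, illustrative only (how dd-trace consumes this format internally, e.g. for data streams propagation, is not shown in this diff):

    const formats = require('dd-trace/ext/formats')

    console.log(formats.TEXT_MAP_DSM) // 'text_map_dsm'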
package/init.js CHANGED
@@ -2,22 +2,10 @@
 
 const path = require('path')
 const Module = require('module')
-const telemetry = require('./packages/dd-trace/src/telemetry/init-telemetry')
 const semver = require('semver')
-
-function isTrue (envVar) {
-  return ['1', 'true', 'True'].includes(envVar)
-}
-
-// eslint-disable-next-line no-console
-let log = { info: isTrue(process.env.DD_TRACE_DEBUG) ? console.log : () => {} }
-if (semver.satisfies(process.versions.node, '>=16')) {
-  const Config = require('./packages/dd-trace/src/config')
-  log = require('./packages/dd-trace/src/log')
-
-  // eslint-disable-next-line no-new
-  new Config() // we need this to initialize the logger
-}
+const log = require('./packages/dd-trace/src/log')
+const { isTrue } = require('./packages/dd-trace/src/util')
+const telemetry = require('./packages/dd-trace/src/telemetry/init-telemetry')
 
 let initBailout = false
 let clobberBailout = false
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "dd-trace",
-  "version": "5.19.0",
+  "version": "5.20.0",
   "description": "Datadog APM tracing client for JavaScript",
   "main": "index.js",
   "typings": "index.d.ts",
@@ -33,6 +33,7 @@
   "test:profiler": "tap \"packages/dd-trace/test/profiling/**/*.spec.js\"",
   "test:profiler:ci": "npm run test:profiler -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/profiling/**/*.js\"",
   "test:integration": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/*.spec.js\"",
+  "test:integration:appsec": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/appsec/*.spec.js\"",
   "test:integration:cucumber": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cucumber/*.spec.js\"",
   "test:integration:cypress": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cypress/*.spec.js\"",
   "test:integration:jest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/jest/*.spec.js\"",
@@ -72,7 +73,7 @@
   },
   "dependencies": {
     "@datadog/native-appsec": "8.0.1",
-    "@datadog/native-iast-rewriter": "2.3.1",
+    "@datadog/native-iast-rewriter": "2.4.0",
     "@datadog/native-iast-taint-tracking": "3.0.0",
     "@datadog/native-metrics": "^2.0.0",
    "@datadog/pprof": "5.3.0",
package/packages/datadog-instrumentations/src/helpers/register.js CHANGED
@@ -29,6 +29,10 @@ if (!disabledInstrumentations.has('fetch')) {
   require('../fetch')
 }
 
+if (!disabledInstrumentations.has('process')) {
+  require('../process')
+}
+
 const HOOK_SYMBOL = Symbol('hookExportsMap')
 
 if (DD_TRACE_DEBUG && DD_TRACE_DEBUG.toLowerCase() !== 'false') {
@@ -88,16 +92,13 @@ for (const packageName of names) {
     if (matchesFile) {
       const version = moduleVersion || getVersion(moduleBaseDir)
       if (!Object.hasOwnProperty(namesAndSuccesses, name)) {
-        namesAndSuccesses[name] = {
-          success: false,
-          version
-        }
+        namesAndSuccesses[`${name}@${version}`] = false
       }
 
       if (matchVersion(version, versions)) {
         // Check if the hook already has a set moduleExport
         if (hook[HOOK_SYMBOL].has(moduleExports)) {
-          namesAndSuccesses[name].success = true
+          namesAndSuccesses[`${name}@${version}`] = true
           return moduleExports
         }
 
@@ -117,19 +118,20 @@ for (const packageName of names) {
           `integration_version:${version}`
         ])
       }
-      namesAndSuccesses[name].success = true
+      namesAndSuccesses[`${name}@${version}`] = true
     }
   }
 }
-for (const name of Object.keys(namesAndSuccesses)) {
-  const { success, version } = namesAndSuccesses[name]
-  if (!success && !seenCombo.has(`${name}@${version}`)) {
+for (const nameVersion of Object.keys(namesAndSuccesses)) {
+  const [name, version] = nameVersion.split('@')
+  const success = namesAndSuccesses[nameVersion]
+  if (!success && !seenCombo.has(nameVersion)) {
    telemetry('abort.integration', [
      `integration:${name}`,
      `integration_version:${version}`
    ])
-    log.info(`Found incompatible integration version: ${name}@${version}`)
-    seenCombo.add(`${name}@${version}`)
+    log.info(`Found incompatible integration version: ${nameVersion}`)
+    seenCombo.add(nameVersion)
  }
 }
 
package/packages/datadog-instrumentations/src/http/client.js CHANGED
@@ -43,7 +43,9 @@ function patch (http, methodName) {
       return request.apply(this, arguments)
     }
 
-    const ctx = { args, http }
+    const abortController = new AbortController()
+
+    const ctx = { args, http, abortController }
 
     return startChannel.runStores(ctx, () => {
       let finished = false
@@ -107,6 +109,10 @@ function patch (http, methodName) {
         return emit.apply(this, arguments)
       }
 
+      if (abortController.signal.aborted) {
+        req.destroy(abortController.signal.reason || new Error('Aborted'))
+      }
+
       return req
     } catch (e) {
      ctx.error = e
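Note: publishing an AbortController on the client request start channel lets a subscriber (for example an AppSec/RASP rule) veto an outgoing request; the wrapper then destroys the request with the abort reason. A minimal subscriber sketch, assuming the usual apm:http:client:request:start channel name (not visible in this hunk) and a hypothetical shouldBlock() policy:

    const { channel } = require('dc-polyfill')

    // assumed channel name, following the apm:http:client:request:* naming
    const startChannel = channel('apm:http:client:request:start')

    startChannel.subscribe((ctx) => {
      // ctx carries { args, http, abortController } per the change above
      if (shouldBlock(ctx.args)) { // hypothetical policy function
        ctx.abortController.abort(new Error('Request blocked'))
      }
    })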
package/packages/datadog-instrumentations/src/http/server.js CHANGED
@@ -12,6 +12,7 @@ const errorServerCh = channel('apm:http:server:request:error')
 const finishServerCh = channel('apm:http:server:request:finish')
 const startWriteHeadCh = channel('apm:http:server:response:writeHead:start')
 const finishSetHeaderCh = channel('datadog:http:server:response:set-header:finish')
+const startSetHeaderCh = channel('datadog:http:server:response:set-header:start')
 
 const requestFinishedSet = new WeakSet()
 
@@ -24,6 +25,12 @@ addHook({ name: httpNames }, http => {
   shimmer.wrap(http.ServerResponse.prototype, 'writeHead', wrapWriteHead)
   shimmer.wrap(http.ServerResponse.prototype, 'write', wrapWrite)
   shimmer.wrap(http.ServerResponse.prototype, 'end', wrapEnd)
+  shimmer.wrap(http.ServerResponse.prototype, 'setHeader', wrapSetHeader)
+  shimmer.wrap(http.ServerResponse.prototype, 'removeHeader', wrapAppendOrRemoveHeader)
+  // Added in node v16.17.0
+  if (http.ServerResponse.prototype.appendHeader) {
+    shimmer.wrap(http.ServerResponse.prototype, 'appendHeader', wrapAppendOrRemoveHeader)
+  }
   return http
 })
 
@@ -65,9 +72,7 @@ function wrapEmit (emit) {
         // TODO: should this always return true ?
         return this.listenerCount(eventName) > 0
       }
-      if (finishSetHeaderCh.hasSubscribers) {
-        wrapSetHeader(res)
-      }
+
      return emit.apply(this, arguments)
    } catch (err) {
      errorServerCh.publish(err)
@@ -81,16 +86,6 @@ function wrapEmit (emit) {
   }
 }
 
-function wrapSetHeader (res) {
-  shimmer.wrap(res, 'setHeader', setHeader => {
-    return function (name, value) {
-      const setHeaderResult = setHeader.apply(this, arguments)
-      finishSetHeaderCh.publish({ name, value, res })
-      return setHeaderResult
-    }
-  })
-}
-
 function wrapWriteHead (writeHead) {
   return function wrappedWriteHead (statusCode, reason, obj) {
     if (!startWriteHeadCh.hasSubscribers) {
@@ -159,6 +154,48 @@ function wrapWrite (write) {
   }
 }
 
+function wrapSetHeader (setHeader) {
+  return function wrappedSetHeader (name, value) {
+    if (!startSetHeaderCh.hasSubscribers && !finishSetHeaderCh.hasSubscribers) {
+      return setHeader.apply(this, arguments)
+    }
+
+    if (startSetHeaderCh.hasSubscribers) {
+      const abortController = new AbortController()
+      startSetHeaderCh.publish({ res: this, abortController })
+
+      if (abortController.signal.aborted) {
+        return
+      }
+    }
+
+    const setHeaderResult = setHeader.apply(this, arguments)
+
+    if (finishSetHeaderCh.hasSubscribers) {
+      finishSetHeaderCh.publish({ name, value, res: this })
+    }
+
+    return setHeaderResult
+  }
+}
+
+function wrapAppendOrRemoveHeader (originalMethod) {
+  return function wrappedAppendOrRemoveHeader () {
+    if (!startSetHeaderCh.hasSubscribers) {
+      return originalMethod.apply(this, arguments)
+    }
+
+    const abortController = new AbortController()
+    startSetHeaderCh.publish({ res: this, abortController })
+
+    if (abortController.signal.aborted) {
+      return this
+    }
+
+    return originalMethod.apply(this, arguments)
+  }
+}
+
 function wrapEnd (end) {
   return function wrappedEnd () {
     if (!startWriteHeadCh.hasSubscribers) {
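Note: the new datadog:http:server:response:set-header:start channel publishes the response and an AbortController before a header is set, appended, or removed, so a subscriber can veto the write. A minimal subscriber sketch (the blocking condition shown is hypothetical):

    const { channel } = require('dc-polyfill')

    const startSetHeaderCh = channel('datadog:http:server:response:set-header:start')

    startSetHeaderCh.subscribe(({ res, abortController }) => {
      // hypothetical rule: once headers have been sent, drop further header writes
      if (res.headersSent) {
        abortController.abort()
      }
    })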
package/packages/datadog-instrumentations/src/mocha/main.js CHANGED
@@ -395,9 +395,13 @@ addHook({
     }
 
     const asyncResource = testFileToSuiteAr.get(suite.file)
-    asyncResource.runInAsyncScope(() => {
-      testSuiteFinishCh.publish(status)
-    })
+    if (asyncResource) {
+      asyncResource.runInAsyncScope(() => {
+        testSuiteFinishCh.publish(status)
+      })
+    } else {
+      log.warn(() => `No AsyncResource found for suite ${suite.file}`)
+    }
   })
 
   return run.apply(this, arguments)
@@ -424,23 +428,29 @@ addHook({
   versions: ['>=6.0.0'],
   file: 'src/WorkerHandler.js'
 }, (workerHandlerPackage) => {
-  shimmer.wrap(workerHandlerPackage.prototype, 'exec', exec => function (message, [testSuiteAbsolutePath]) {
+  shimmer.wrap(workerHandlerPackage.prototype, 'exec', exec => function (_, path) {
     if (!testStartCh.hasSubscribers) {
       return exec.apply(this, arguments)
     }
+    if (!path?.length) {
+      return exec.apply(this, arguments)
+    }
+    const [testSuiteAbsolutePath] = path
+    const testSuiteAsyncResource = new AsyncResource('bound-anonymous-fn')
 
-    this.worker.on('message', function (message) {
+    function onMessage (message) {
       if (Array.isArray(message)) {
         const [messageCode, payload] = message
         if (messageCode === MOCHA_WORKER_TRACE_PAYLOAD_CODE) {
-          testSessionAsyncResource.runInAsyncScope(() => {
+          testSuiteAsyncResource.runInAsyncScope(() => {
            workerReportTraceCh.publish(payload)
          })
        }
      }
-    })
+    }
+
+    this.worker.on('message', onMessage)
 
-    const testSuiteAsyncResource = new AsyncResource('bound-anonymous-fn')
     testSuiteAsyncResource.runInAsyncScope(() => {
      testSuiteStartCh.publish({
        testSuiteAbsolutePath
@@ -455,12 +465,14 @@ addHook({
       testSuiteAsyncResource.runInAsyncScope(() => {
         testSuiteFinishCh.publish(status)
       })
+      this.worker.off('message', onMessage)
     },
     (err) => {
       testSuiteAsyncResource.runInAsyncScope(() => {
         testSuiteErrorCh.publish(err)
         testSuiteFinishCh.publish('fail')
       })
+      this.worker.off('message', onMessage)
     }
   )
   return promise
@@ -469,6 +481,7 @@ addHook({
       testSuiteErrorCh.publish(err)
       testSuiteFinishCh.publish('fail')
     })
+    this.worker.off('message', onMessage)
     throw err
   }
 })
package/packages/datadog-instrumentations/src/process.js ADDED
@@ -0,0 +1,29 @@
+'use strict'
+
+const shimmer = require('../../datadog-shimmer')
+const { channel } = require('dc-polyfill')
+
+const startSetUncaughtExceptionCaptureCallback = channel('datadog:process:setUncaughtExceptionCaptureCallback:start')
+
+if (process.setUncaughtExceptionCaptureCallback) {
+  let currentCallback
+
+  shimmer.wrap(process, 'setUncaughtExceptionCaptureCallback',
+    function wrapSetUncaughtExceptionCaptureCallback (originalSetUncaughtExceptionCaptureCallback) {
+      return function setUncaughtExceptionCaptureCallback (newCallback) {
+        if (startSetUncaughtExceptionCaptureCallback.hasSubscribers) {
+          const abortController = new AbortController()
+          startSetUncaughtExceptionCaptureCallback.publish({ newCallback, currentCallback, abortController })
+          if (abortController.signal.aborted) {
+            return
+          }
+        }
+
+        const result = originalSetUncaughtExceptionCaptureCallback.apply(this, arguments)
+
+        currentCallback = newCallback
+
+        return result
+      }
+    })
+}
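Note: this new instrumentation publishes the callback being installed, the one currently in place, and an AbortController, so a subscriber can prevent the replacement (the wrapper returns early when aborted). A minimal subscriber sketch (the policy shown is hypothetical):

    const { channel } = require('dc-polyfill')

    const ch = channel('datadog:process:setUncaughtExceptionCaptureCallback:start')

    ch.subscribe(({ currentCallback, newCallback, abortController }) => {
      // hypothetical rule: don't let user code replace an already-installed callback
      if (currentCallback && newCallback) {
        abortController.abort()
      }
    })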
package/packages/datadog-instrumentations/src/vitest.js CHANGED
@@ -32,6 +32,10 @@ function isReporterPackageNew (vitestPackage) {
   return vitestPackage.e?.name === 'BaseSequencer'
 }
 
+function isReporterPackageNewest (vitestPackage) {
+  return vitestPackage.h?.name === 'BaseSequencer'
+}
+
 function getChannelPromise (channelToPublishTo) {
   return new Promise(resolve => {
     sessionAsyncResource.runInAsyncScope(() => {
@@ -146,6 +150,21 @@ function getSortWrapper (sort) {
   }
 }
 
+function getCreateCliWrapper (vitestPackage, frameworkVersion) {
+  shimmer.wrap(vitestPackage, 'c', oldCreateCli => function () {
+    if (!testSessionStartCh.hasSubscribers) {
+      return oldCreateCli.apply(this, arguments)
+    }
+    sessionAsyncResource.runInAsyncScope(() => {
+      const processArgv = process.argv.slice(2).join(' ')
+      testSessionStartCh.publish({ command: `vitest ${processArgv}`, frameworkVersion })
+    })
+    return oldCreateCli.apply(this, arguments)
+  })
+
+  return vitestPackage
+}
+
 addHook({
   name: 'vitest',
   versions: ['>=1.6.0'],
@@ -206,12 +225,25 @@ addHook({
   return vitestPackage
 })
 
+// There are multiple index* files across different versions of vitest,
+// so we check for the existence of BaseSequencer to determine if we are in the right file
+addHook({
+  name: 'vitest',
+  versions: ['>=1.6.0 <2.0.0'],
+  filePattern: 'dist/vendor/index.*'
+}, (vitestPackage) => {
+  if (isReporterPackage(vitestPackage)) {
+    shimmer.wrap(vitestPackage.B.prototype, 'sort', getSortWrapper)
+  }
+
+  return vitestPackage
+})
+
 addHook({
   name: 'vitest',
-  versions: ['>=2.0.0'],
+  versions: ['>=2.0.0 <2.0.5'],
   filePattern: 'dist/vendor/index.*'
 }, (vitestPackage) => {
-  // there are multiple index* files so we have to check the exported values
   if (isReporterPackageNew(vitestPackage)) {
     shimmer.wrap(vitestPackage.e.prototype, 'sort', getSortWrapper)
   }
@@ -221,12 +253,11 @@ addHook({
 
 addHook({
   name: 'vitest',
-  versions: ['>=1.6.0'],
-  filePattern: 'dist/vendor/index.*'
+  versions: ['>=2.0.5'],
+  filePattern: 'dist/chunks/index.*'
 }, (vitestPackage) => {
-  // there are multiple index* files so we have to check the exported values
-  if (isReporterPackage(vitestPackage)) {
-    shimmer.wrap(vitestPackage.B.prototype, 'sort', getSortWrapper)
+  if (isReporterPackageNewest(vitestPackage)) {
+    shimmer.wrap(vitestPackage.h.prototype, 'sort', getSortWrapper)
   }
 
   return vitestPackage
@@ -235,22 +266,15 @@ addHook({
 // Can't specify file because compiled vitest includes hashes in their files
 addHook({
   name: 'vitest',
-  versions: ['>=1.6.0'],
+  versions: ['>=1.6.0 <2.0.5'],
   filePattern: 'dist/vendor/cac.*'
-}, (vitestPackage, frameworkVersion) => {
-  shimmer.wrap(vitestPackage, 'c', oldCreateCli => function () {
-    if (!testSessionStartCh.hasSubscribers) {
-      return oldCreateCli.apply(this, arguments)
-    }
-    sessionAsyncResource.runInAsyncScope(() => {
-      const processArgv = process.argv.slice(2).join(' ')
-      testSessionStartCh.publish({ command: `vitest ${processArgv}`, frameworkVersion })
-    })
-    return oldCreateCli.apply(this, arguments)
-  })
+}, getCreateCliWrapper)
 
-  return vitestPackage
-})
+addHook({
+  name: 'vitest',
+  versions: ['>=2.0.5'],
+  filePattern: 'dist/chunks/cac.*'
+}, getCreateCliWrapper)
 
 // test suite start and finish
 // only relevant for workers
@@ -258,7 +282,7 @@ addHook({
   name: '@vitest/runner',
   versions: ['>=1.6.0'],
   file: 'dist/index.js'
-}, vitestPackage => {
+}, (vitestPackage, frameworkVersion) => {
   shimmer.wrap(vitestPackage, 'startTests', startTests => async function (testPath) {
     let testSuiteError = null
     if (!testSuiteStartCh.hasSubscribers) {
@@ -267,7 +291,7 @@ addHook({
 
     const testSuiteAsyncResource = new AsyncResource('bound-anonymous-fn')
     testSuiteAsyncResource.runInAsyncScope(() => {
-      testSuiteStartCh.publish(testPath[0])
+      testSuiteStartCh.publish({ testSuiteAbsolutePath: testPath[0], frameworkVersion })
     })
     const startTestsResponse = await startTests.apply(this, arguments)
 
package/packages/datadog-plugin-aws-sdk/src/base.js CHANGED
@@ -4,6 +4,7 @@ const analyticsSampler = require('../../dd-trace/src/analytics_sampler')
 const ClientPlugin = require('../../dd-trace/src/plugins/client')
 const { storage } = require('../../datadog-core')
 const { isTrue } = require('../../dd-trace/src/util')
+const coalesce = require('koalas')
 
 class BaseAwsSdkPlugin extends ClientPlugin {
   static get id () { return 'aws' }
@@ -163,9 +164,22 @@ function normalizeConfig (config, serviceIdentifier) {
       break
   }
 
+  // check if AWS batch propagation or AWS_[SERVICE] batch propagation is enabled via env variable
+  const serviceId = serviceIdentifier.toUpperCase()
+  const batchPropagationEnabled = isTrue(
+    coalesce(
+      specificConfig.batchPropagationEnabled,
+      process.env[`DD_TRACE_AWS_SDK_${serviceId}_BATCH_PROPAGATION_ENABLED`],
+      config.batchPropagationEnabled,
+      process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED,
+      false
+    )
+  )
+
+  // Merge the specific config back into the main config
   return Object.assign({}, config, specificConfig, {
     splitByAwsService: config.splitByAwsService !== false,
-    batchPropagationEnabled: config.batchPropagationEnabled !== false,
+    batchPropagationEnabled,
     hooks
   })
 }
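Note: batch propagation is now opt-in and resolved per AWS service or globally, from plugin config or the env vars read above. An illustrative sketch (assumes the standard tracer.use() plugin configuration; the per-service key mirrors the specificConfig lookup in normalizeConfig):

    // via environment:
    //   DD_TRACE_AWS_SDK_SQS_BATCH_PROPAGATION_ENABLED=true   (one service)
    //   DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED=true       (all services)
    const tracer = require('dd-trace').init()

    tracer.use('aws-sdk', {
      sqs: { batchPropagationEnabled: true } // per-service config
    })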
package/packages/datadog-plugin-aws-sdk/src/services/kinesis.js CHANGED
@@ -156,7 +156,7 @@ class Kinesis extends BaseAwsSdkPlugin {
           span,
           params.Records[i],
           stream,
-          i === 0 || (this.config.kinesis && this.config.kinesis.batchPropagationEnabled)
+          i === 0 || (this.config.batchPropagationEnabled)
         )
       }
     }
package/packages/datadog-plugin-aws-sdk/src/services/sns.js CHANGED
@@ -63,7 +63,7 @@ class Sns extends BaseAwsSdkPlugin {
           span,
           params.PublishBatchRequestEntries[i],
           params.TopicArn,
-          i === 0 || (this.config.sns && this.config.sns.batchPropagationEnabled)
+          i === 0 || (this.config.batchPropagationEnabled)
         )
       }
       break
package/packages/datadog-plugin-aws-sdk/src/services/sqs.js CHANGED
@@ -157,8 +157,8 @@ class Sqs extends BaseAwsSdkPlugin {
       if (attributes.StringValue) {
         const textMap = attributes.StringValue
         return JSON.parse(textMap)
-      } else if (attributes.Type === 'Binary') {
-        const buffer = Buffer.from(attributes.Value, 'base64')
+      } else if (attributes.Type === 'Binary' || attributes.DataType === 'Binary') {
+        const buffer = Buffer.from(attributes.Value ?? attributes.BinaryValue, 'base64')
         return JSON.parse(buffer)
       }
     } catch (e) {
@@ -222,7 +222,7 @@ class Sqs extends BaseAwsSdkPlugin {
           span,
           params.Entries[i],
           params.QueueUrl,
-          i === 0 || (this.config.sqs && this.config.sqs.batchPropagationEnabled)
+          i === 0 || (this.config.batchPropagationEnabled)
         )
       }
       break
package/packages/datadog-plugin-cypress/src/cypress-plugin.js CHANGED
@@ -258,7 +258,7 @@ class CypressPlugin {
     })
   }
 
-  getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) {
+  getTestSpan ({ testName, testSuite, isUnskippable, isForcedToRun, testSourceFile }) {
     const testSuiteTags = {
       [TEST_COMMAND]: this.command,
       [TEST_COMMAND]: this.command,
@@ -282,8 +282,11 @@ class CypressPlugin {
       ...testSpanMetadata
     } = getTestCommonTags(testName, testSuite, this.cypressConfig.version, TEST_FRAMEWORK_NAME)
 
-    const codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
+    if (testSourceFile) {
+      testSpanMetadata[TEST_SOURCE_FILE] = testSourceFile
+    }
 
+    const codeOwners = this.getTestCodeOwners({ testSuite, testSourceFile })
     if (codeOwners) {
       testSpanMetadata[TEST_CODE_OWNERS] = codeOwners
     }
@@ -480,12 +483,16 @@ class CypressPlugin {
       const isSkippedByItr = this.testsToSkip.find(test =>
         cypressTestName === test.name && spec.relative === test.suite
       )
-      const skippedTestSpan = this.getTestSpan(cypressTestName, spec.relative)
+      let testSourceFile
+
       if (spec.absolute && this.repositoryRoot) {
-        skippedTestSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, this.repositoryRoot))
+        testSourceFile = getTestSuitePath(spec.absolute, this.repositoryRoot)
       } else {
-        skippedTestSpan.setTag(TEST_SOURCE_FILE, spec.relative)
+        testSourceFile = spec.relative
       }
+
+      const skippedTestSpan = this.getTestSpan({ testName: cypressTestName, testSuite: spec.relative, testSourceFile })
+
       skippedTestSpan.setTag(TEST_STATUS, 'skip')
       if (isSkippedByItr) {
         skippedTestSpan.setTag(TEST_SKIPPED_BY_ITR, 'true')
@@ -538,11 +545,21 @@ class CypressPlugin {
         if (this.itrCorrelationId) {
           finishedTest.testSpan.setTag(ITR_CORRELATION_ID, this.itrCorrelationId)
         }
+        let testSourceFile
         if (spec.absolute && this.repositoryRoot) {
-          finishedTest.testSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, this.repositoryRoot))
+          testSourceFile = getTestSuitePath(spec.absolute, this.repositoryRoot)
        } else {
-          finishedTest.testSpan.setTag(TEST_SOURCE_FILE, spec.relative)
+          testSourceFile = spec.relative
+        }
+        if (testSourceFile) {
+          finishedTest.testSpan.setTag(TEST_SOURCE_FILE, testSourceFile)
+        }
+        const codeOwners = this.getTestCodeOwners({ testSuite: spec.relative, testSourceFile })
+
+        if (codeOwners) {
+          finishedTest.testSpan.setTag(TEST_CODE_OWNERS, codeOwners)
        }
+
        finishedTest.testSpan.finish(finishedTest.finishTime)
      })
    })
@@ -591,7 +608,12 @@ class CypressPlugin {
     }
 
     if (!this.activeTestSpan) {
-      this.activeTestSpan = this.getTestSpan(testName, testSuite, isUnskippable, isForcedToRun)
+      this.activeTestSpan = this.getTestSpan({
+        testName,
+        testSuite,
+        isUnskippable,
+        isForcedToRun
+      })
     }
 
     return this.activeTestSpan ? { traceId: this.activeTestSpan.context().toTraceId() } : {}
@@ -658,6 +680,13 @@ class CypressPlugin {
       }
     }
   }
+
+  getTestCodeOwners ({ testSuite, testSourceFile }) {
+    if (testSourceFile) {
+      return getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries)
+    }
+    return getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
+  }
 }
 
 module.exports = new CypressPlugin()
package/packages/datadog-plugin-vitest/src/index.js CHANGED
@@ -102,7 +102,8 @@ class VitestPlugin extends CiPlugin {
       ).finish()
     })
 
-    this.addSub('ci:vitest:test-suite:start', (testSuiteAbsolutePath) => {
+    this.addSub('ci:vitest:test-suite:start', ({ testSuiteAbsolutePath, frameworkVersion }) => {
+      this.frameworkVersion = frameworkVersion
       const testSessionSpanContext = this.tracer.extract('text_map', {
         'x-datadog-trace-id': process.env.DD_CIVISIBILITY_TEST_SESSION_ID,
         'x-datadog-parent-id': process.env.DD_CIVISIBILITY_TEST_MODULE_ID