dd-trace 5.3.0 → 5.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/CONTRIBUTING.md +98 -0
  2. package/README.md +4 -102
  3. package/ci/cypress/after-run.js +1 -0
  4. package/package.json +2 -2
  5. package/packages/datadog-instrumentations/src/cucumber.js +156 -42
  6. package/packages/datadog-instrumentations/src/jest.js +84 -49
  7. package/packages/datadog-instrumentations/src/mocha.js +139 -13
  8. package/packages/datadog-plugin-amqplib/src/consumer.js +5 -2
  9. package/packages/datadog-plugin-aws-sdk/src/services/kinesis.js +60 -50
  10. package/packages/datadog-plugin-aws-sdk/src/services/sns.js +40 -17
  11. package/packages/datadog-plugin-aws-sdk/src/services/sqs.js +62 -26
  12. package/packages/datadog-plugin-cucumber/src/index.js +25 -9
  13. package/packages/datadog-plugin-cypress/src/after-run.js +3 -0
  14. package/packages/datadog-plugin-cypress/src/cypress-plugin.js +560 -0
  15. package/packages/datadog-plugin-cypress/src/plugin.js +6 -533
  16. package/packages/datadog-plugin-jest/src/index.js +4 -8
  17. package/packages/datadog-plugin-kafkajs/src/consumer.js +16 -0
  18. package/packages/datadog-plugin-mocha/src/index.js +38 -17
  19. package/packages/datadog-plugin-rhea/src/consumer.js +4 -1
  20. package/packages/dd-trace/src/appsec/iast/context/context-plugin.js +90 -0
  21. package/packages/dd-trace/src/appsec/iast/context/kafka-ctx-plugin.js +14 -0
  22. package/packages/dd-trace/src/appsec/iast/iast-plugin.js +8 -0
  23. package/packages/dd-trace/src/appsec/iast/index.js +4 -4
  24. package/packages/dd-trace/src/appsec/iast/overhead-controller.js +1 -1
  25. package/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js +1 -0
  26. package/packages/dd-trace/src/appsec/iast/taint-tracking/index.js +10 -0
  27. package/packages/dd-trace/src/appsec/iast/taint-tracking/operations-taint-object.js +53 -0
  28. package/packages/dd-trace/src/appsec/iast/taint-tracking/operations.js +10 -46
  29. package/packages/dd-trace/src/appsec/iast/taint-tracking/plugin.js +13 -9
  30. package/packages/dd-trace/src/appsec/iast/taint-tracking/plugins/kafka.js +47 -0
  31. package/packages/dd-trace/src/appsec/iast/taint-tracking/source-types.js +3 -1
  32. package/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js +29 -2
  33. package/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js +1 -1
  34. package/packages/dd-trace/src/appsec/remote_config/capabilities.js +2 -1
  35. package/packages/dd-trace/src/appsec/remote_config/index.js +1 -0
  36. package/packages/dd-trace/src/config.js +3 -2
  37. package/packages/dd-trace/src/opentracing/propagation/text_map.js +1 -1
  38. package/packages/dd-trace/src/opentracing/span.js +4 -4
  39. package/packages/dd-trace/src/plugins/ci_plugin.js +1 -1
  40. package/packages/dd-trace/src/plugins/util/test.js +17 -1
  41. package/packages/dd-trace/src/profiling/exporters/agent.js +40 -31
  42. package/packages/dd-trace/src/telemetry/index.js +3 -0
  43. package/packages/dd-trace/src/telemetry/logs/index.js +2 -2
  44. package/packages/dd-trace/src/telemetry/send-data.js +0 -3
@@ -9,7 +9,9 @@ const {
9
9
  JEST_WORKER_COVERAGE_PAYLOAD_CODE,
10
10
  getTestLineStart,
11
11
  getTestSuitePath,
12
- getTestParametersString
12
+ getTestParametersString,
13
+ EFD_STRING,
14
+ removeEfdStringFromTestName
13
15
  } = require('../../dd-trace/src/plugins/util/test')
14
16
  const {
15
17
  getFormattedJestTestParameters,
@@ -56,9 +58,7 @@ let hasUnskippableSuites = false
56
58
  let hasForcedToRunSuites = false
57
59
  let isEarlyFlakeDetectionEnabled = false
58
60
  let earlyFlakeDetectionNumRetries = 0
59
-
60
- const EFD_STRING = "Retried by Datadog's Early Flake Detection"
61
- const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g')
61
+ let hasFilteredSkippableSuites = false
62
62
 
63
63
  const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
64
64
 
@@ -105,10 +105,6 @@ function getEfdTestName (testName, numAttempt) {
105
105
  return `${EFD_STRING} (#${numAttempt}): ${testName}`
106
106
  }
107
107
 
108
- function removeEfdTestName (testName) {
109
- return testName.replace(EFD_TEST_NAME_REGEX, '')
110
- }
111
-
112
108
  function getWrappedEnvironment (BaseEnvironment, jestVersion) {
113
109
  return class DatadogEnvironment extends BaseEnvironment {
114
110
  constructor (config, context) {
@@ -116,12 +112,17 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
116
112
  const rootDir = config.globalConfig ? config.globalConfig.rootDir : config.rootDir
117
113
  this.rootDir = rootDir
118
114
  this.testSuite = getTestSuitePath(context.testPath, rootDir)
119
- this.testFileAbsolutePath = context.testPath
120
115
  this.nameToParams = {}
121
116
  this.global._ddtrace = global._ddtrace
122
117
 
123
118
  this.testEnvironmentOptions = getTestEnvironmentOptions(config)
124
119
 
120
+ const repositoryRoot = this.testEnvironmentOptions._ddRepositoryRoot
121
+
122
+ if (repositoryRoot) {
123
+ this.testSourceFile = getTestSuitePath(context.testPath, repositoryRoot)
124
+ }
125
+
125
126
  this.isEarlyFlakeDetectionEnabled = this.testEnvironmentOptions._ddIsEarlyFlakeDetectionEnabled
126
127
 
127
128
  if (this.isEarlyFlakeDetectionEnabled) {
@@ -152,7 +153,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
152
153
  // we use its describe block to get the full name
153
154
  getTestNameFromAddTestEvent (event, state) {
154
155
  const describeSuffix = getJestTestName(state.currentDescribeBlock)
155
- return removeEfdTestName(`${describeSuffix} ${event.testName}`).trim()
156
+ return removeEfdStringFromTestName(`${describeSuffix} ${event.testName}`).trim()
156
157
  }
157
158
 
158
159
  async handleTestEvent (event, state) {
@@ -186,7 +187,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
186
187
  const testName = getJestTestName(event.test)
187
188
 
188
189
  if (this.isEarlyFlakeDetectionEnabled) {
189
- const originalTestName = removeEfdTestName(testName)
190
+ const originalTestName = removeEfdStringFromTestName(testName)
190
191
  isNewTest = retriedTestsToNumAttempts.has(originalTestName)
191
192
  if (isNewTest) {
192
193
  numEfdRetry = retriedTestsToNumAttempts.get(originalTestName)
@@ -196,9 +197,9 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
196
197
 
197
198
  asyncResource.runInAsyncScope(() => {
198
199
  testStartCh.publish({
199
- name: removeEfdTestName(testName),
200
+ name: removeEfdStringFromTestName(testName),
200
201
  suite: this.testSuite,
201
- testFileAbsolutePath: this.testFileAbsolutePath,
202
+ testSourceFile: this.testSourceFile,
202
203
  runner: 'jest-circus',
203
204
  testParameters,
204
205
  frameworkVersion: jestVersion,
@@ -249,7 +250,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
249
250
  testSkippedCh.publish({
250
251
  name: getJestTestName(event.test),
251
252
  suite: this.testSuite,
252
- testFileAbsolutePath: this.testFileAbsolutePath,
253
+ testSourceFile: this.testSourceFile,
253
254
  runner: 'jest-circus',
254
255
  frameworkVersion: jestVersion,
255
256
  testStartLine: getTestLineStart(event.test.asyncError, this.testSuite)
@@ -270,6 +271,23 @@ function getTestEnvironment (pkg, jestVersion) {
270
271
  return getWrappedEnvironment(pkg, jestVersion)
271
272
  }
272
273
 
274
+ function applySuiteSkipping (originalTests, rootDir, frameworkVersion) {
275
+ const jestSuitesToRun = getJestSuitesToRun(skippableSuites, originalTests, rootDir || process.cwd())
276
+ hasFilteredSkippableSuites = true
277
+ log.debug(
278
+ () => `${jestSuitesToRun.suitesToRun.length} out of ${originalTests.length} suites are going to run.`
279
+ )
280
+ hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
281
+ hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
282
+
283
+ isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== originalTests.length
284
+ numSkippedSuites = jestSuitesToRun.skippedSuites.length
285
+
286
+ itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
287
+ skippableSuites = []
288
+ return jestSuitesToRun.suitesToRun
289
+ }
290
+
273
291
  addHook({
274
292
  name: 'jest-environment-node',
275
293
  versions: ['>=24.8.0']
@@ -280,6 +298,51 @@ addHook({
280
298
  versions: ['>=24.8.0']
281
299
  }, getTestEnvironment)
282
300
 
301
+ function getWrappedScheduleTests (scheduleTests, frameworkVersion) {
302
+ return async function (tests) {
303
+ if (!isSuitesSkippingEnabled || hasFilteredSkippableSuites) {
304
+ return scheduleTests.apply(this, arguments)
305
+ }
306
+ const [test] = tests
307
+ const rootDir = test?.context?.config?.rootDir
308
+
309
+ arguments[0] = applySuiteSkipping(tests, rootDir, frameworkVersion)
310
+
311
+ return scheduleTests.apply(this, arguments)
312
+ }
313
+ }
314
+
315
+ addHook({
316
+ name: '@jest/core',
317
+ file: 'build/TestScheduler.js',
318
+ versions: ['>=27.0.0']
319
+ }, (testSchedulerPackage, frameworkVersion) => {
320
+ const oldCreateTestScheduler = testSchedulerPackage.createTestScheduler
321
+ const newCreateTestScheduler = async function () {
322
+ if (!isSuitesSkippingEnabled || hasFilteredSkippableSuites) {
323
+ return oldCreateTestScheduler.apply(this, arguments)
324
+ }
325
+ // If suite skipping is enabled and has not filtered skippable suites yet, we'll attempt to do it
326
+ const scheduler = await oldCreateTestScheduler.apply(this, arguments)
327
+ shimmer.wrap(scheduler, 'scheduleTests', scheduleTests => getWrappedScheduleTests(scheduleTests, frameworkVersion))
328
+ return scheduler
329
+ }
330
+ testSchedulerPackage.createTestScheduler = newCreateTestScheduler
331
+ return testSchedulerPackage
332
+ })
333
+
334
+ addHook({
335
+ name: '@jest/core',
336
+ file: 'build/TestScheduler.js',
337
+ versions: ['>=24.8.0 <27.0.0']
338
+ }, (testSchedulerPackage, frameworkVersion) => {
339
+ shimmer.wrap(
340
+ testSchedulerPackage.default.prototype,
341
+ 'scheduleTests', scheduleTests => getWrappedScheduleTests(scheduleTests, frameworkVersion)
342
+ )
343
+ return testSchedulerPackage
344
+ })
345
+
283
346
  addHook({
284
347
  name: '@jest/test-sequencer',
285
348
  versions: ['>=24.8.0']
@@ -287,29 +350,13 @@ addHook({
287
350
  shimmer.wrap(sequencerPackage.default.prototype, 'shard', shard => function () {
288
351
  const shardedTests = shard.apply(this, arguments)
289
352
 
290
- if (!shardedTests.length) {
353
+ if (!shardedTests.length || !isSuitesSkippingEnabled || !skippableSuites.length) {
291
354
  return shardedTests
292
355
  }
293
- // TODO: could we get the rootDir from each test?
294
356
  const [test] = shardedTests
295
357
  const rootDir = test?.context?.config?.rootDir
296
358
 
297
- const jestSuitesToRun = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd())
298
-
299
- log.debug(
300
- () => `${jestSuitesToRun.suitesToRun.length} out of ${shardedTests.length} suites are going to run.`
301
- )
302
-
303
- hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
304
- hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
305
-
306
- isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== shardedTests.length
307
- numSkippedSuites = jestSuitesToRun.skippedSuites.length
308
-
309
- itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
310
-
311
- skippableSuites = []
312
- return jestSuitesToRun.suitesToRun
359
+ return applySuiteSkipping(shardedTests, rootDir, frameworkVersion)
313
360
  })
314
361
  return sequencerPackage
315
362
  })
@@ -635,6 +682,7 @@ addHook({
635
682
  _ddKnownTests,
636
683
  _ddIsEarlyFlakeDetectionEnabled,
637
684
  _ddEarlyFlakeDetectionNumRetries,
685
+ _ddRepositoryRoot,
638
686
  ...restOfTestEnvironmentOptions
639
687
  } = testEnvironmentOptions
640
688
 
@@ -659,13 +707,13 @@ addHook({
659
707
  const SearchSource = searchSourcePackage.default ? searchSourcePackage.default : searchSourcePackage
660
708
 
661
709
  shimmer.wrap(SearchSource.prototype, 'getTestPaths', getTestPaths => async function () {
662
- if (!skippableSuites.length) {
710
+ if (!isSuitesSkippingEnabled || !skippableSuites.length) {
663
711
  return getTestPaths.apply(this, arguments)
664
712
  }
665
713
 
666
714
  const [{ rootDir, shard }] = arguments
667
715
 
668
- if (shard && shard.shardIndex) {
716
+ if (shard?.shardCount > 1) {
669
717
  // If the user is using jest sharding, we want to apply the filtering of tests in the shard process.
670
718
  // The reason for this is the following:
671
719
  // The tests for different shards are likely being run in different CI jobs so
@@ -679,21 +727,8 @@ addHook({
679
727
  const testPaths = await getTestPaths.apply(this, arguments)
680
728
  const { tests } = testPaths
681
729
 
682
- const jestSuitesToRun = getJestSuitesToRun(skippableSuites, tests, rootDir)
683
-
684
- log.debug(() => `${jestSuitesToRun.suitesToRun.length} out of ${tests.length} suites are going to run.`)
685
-
686
- hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
687
- hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
688
-
689
- isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== tests.length
690
- numSkippedSuites = jestSuitesToRun.skippedSuites.length
691
-
692
- itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
693
-
694
- skippableSuites = []
695
-
696
- return { ...testPaths, tests: jestSuitesToRun.suitesToRun }
730
+ const suitesToRun = applySuiteSkipping(tests, rootDir, frameworkVersion)
731
+ return { ...testPaths, tests: suitesToRun }
697
732
  })
698
733
 
699
734
  return searchSourcePackage
@@ -11,7 +11,9 @@ const {
11
11
  mergeCoverage,
12
12
  getTestSuitePath,
13
13
  fromCoverageMapToCoverage,
14
- getCallSites
14
+ getCallSites,
15
+ addEfdStringToTestName,
16
+ removeEfdStringFromTestName
15
17
  } = require('../../dd-trace/src/plugins/util/test')
16
18
 
17
19
  const testStartCh = channel('ci:mocha:test:start')
@@ -21,6 +23,7 @@ const testFinishCh = channel('ci:mocha:test:finish')
21
23
  const parameterizedTestCh = channel('ci:mocha:test:parameterize')
22
24
 
23
25
  const libraryConfigurationCh = channel('ci:mocha:library-configuration')
26
+ const knownTestsCh = channel('ci:mocha:known-tests')
24
27
  const skippableSuitesCh = channel('ci:mocha:test-suite:skippable')
25
28
 
26
29
  const testSessionStartCh = channel('ci:mocha:session:start')
@@ -40,6 +43,7 @@ const testToAr = new WeakMap()
40
43
  const originalFns = new WeakMap()
41
44
  const testFileToSuiteAr = new Map()
42
45
  const testToStartLine = new WeakMap()
46
+ const newTests = {}
43
47
 
44
48
  // `isWorker` is true if it's a Mocha worker
45
49
  let isWorker = false
@@ -54,6 +58,10 @@ let skippedSuites = []
54
58
  const unskippableSuites = []
55
59
  let isForcedToRun = false
56
60
  let itrCorrelationId = ''
61
+ let isEarlyFlakeDetectionEnabled = false
62
+ let earlyFlakeDetectionNumRetries = 0
63
+ let isSuitesSkippingEnabled = false
64
+ let knownTests = []
57
65
 
58
66
  function getSuitesByTestFile (root) {
59
67
  const suitesByTestFile = {}
@@ -93,6 +101,26 @@ function isRetry (test) {
93
101
  return test._currentRetry !== undefined && test._currentRetry !== 0
94
102
  }
95
103
 
104
+ function getTestFullName (test) {
105
+ return `mocha.${getTestSuitePath(test.file, process.cwd())}.${removeEfdStringFromTestName(test.fullTitle())}`
106
+ }
107
+
108
+ function isNewTest (test) {
109
+ return !knownTests.includes(getTestFullName(test))
110
+ }
111
+
112
+ function retryTest (test) {
113
+ const originalTestName = test.title
114
+ const suite = test.parent
115
+ for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
116
+ const clonedTest = test.clone()
117
+ clonedTest.title = addEfdStringToTestName(originalTestName, retryIndex + 1)
118
+ suite.addTest(clonedTest)
119
+ clonedTest._ddIsNew = true
120
+ clonedTest._ddIsEfdRetry = true
121
+ }
122
+ }
123
+
96
124
  function getTestAsyncResource (test) {
97
125
  if (!test.fn) {
98
126
  return testToAr.get(test)
@@ -123,6 +151,19 @@ function mochaHook (Runner) {
123
151
 
124
152
  patched.add(Runner)
125
153
 
154
+ shimmer.wrap(Runner.prototype, 'runTests', runTests => function (suite, fn) {
155
+ if (isEarlyFlakeDetectionEnabled) {
156
+ // by the time we reach `this.on('test')`, it is too late. We need to add retries here
157
+ suite.tests.forEach(test => {
158
+ if (!test.isPending() && isNewTest(test)) {
159
+ test._ddIsNew = true
160
+ retryTest(test)
161
+ }
162
+ })
163
+ }
164
+ return runTests.apply(this, arguments)
165
+ })
166
+
126
167
  shimmer.wrap(Runner.prototype, 'run', run => function () {
127
168
  if (!testStartCh.hasSubscribers || isWorker) {
128
169
  return run.apply(this, arguments)
@@ -144,6 +185,24 @@ function mochaHook (Runner) {
144
185
  status = 'fail'
145
186
  }
146
187
 
188
+ if (isEarlyFlakeDetectionEnabled) {
189
+ /**
190
+ * If Early Flake Detection (EFD) is enabled the logic is as follows:
191
+ * - If all attempts for a test are failing, the test has failed and we will let the test process fail.
192
+ * - If just a single attempt passes, we will prevent the test process from failing.
193
+ * The rationale behind is the following: you may still be able to block your CI pipeline by gating
194
+ * on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too.
195
+ */
196
+ for (const tests of Object.values(newTests)) {
197
+ const failingNewTests = tests.filter(test => test.isFailed())
198
+ const areAllNewTestsFailing = failingNewTests.length === tests.length
199
+ if (failingNewTests.length && !areAllNewTestsFailing) {
200
+ this.stats.failures -= failingNewTests.length
201
+ this.failures -= failingNewTests.length
202
+ }
203
+ }
204
+ }
205
+
147
206
  if (status === 'fail') {
148
207
  error = new Error(`Failed tests: ${this.failures}.`)
149
208
  }
@@ -168,7 +227,8 @@ function mochaHook (Runner) {
168
227
  numSkippedSuites: skippedSuites.length,
169
228
  hasForcedToRunSuites: isForcedToRun,
170
229
  hasUnskippableSuites: !!unskippableSuites.length,
171
- error
230
+ error,
231
+ isEarlyFlakeDetectionEnabled
172
232
  })
173
233
  }))
174
234
 
@@ -253,8 +313,35 @@ function mochaHook (Runner) {
253
313
  const testStartLine = testToStartLine.get(test)
254
314
  const asyncResource = new AsyncResource('bound-anonymous-fn')
255
315
  testToAr.set(test.fn, asyncResource)
316
+
317
+ const {
318
+ file: testSuiteAbsolutePath,
319
+ title,
320
+ _ddIsNew: isNew,
321
+ _ddIsEfdRetry: isEfdRetry
322
+ } = test
323
+
324
+ const testInfo = {
325
+ testName: test.fullTitle(),
326
+ testSuiteAbsolutePath,
327
+ title,
328
+ isNew,
329
+ isEfdRetry,
330
+ testStartLine
331
+ }
332
+
333
+ // We want to store the result of the new tests
334
+ if (isNew) {
335
+ const testFullName = getTestFullName(test)
336
+ if (newTests[testFullName]) {
337
+ newTests[testFullName].push(test)
338
+ } else {
339
+ newTests[testFullName] = [test]
340
+ }
341
+ }
342
+
256
343
  asyncResource.runInAsyncScope(() => {
257
- testStartCh.publish({ test, testStartLine })
344
+ testStartCh.publish(testInfo)
258
345
  })
259
346
  })
260
347
 
@@ -323,10 +410,23 @@ function mochaHook (Runner) {
323
410
  })
324
411
 
325
412
  this.on('pending', (test) => {
413
+ const testStartLine = testToStartLine.get(test)
414
+ const {
415
+ file: testSuiteAbsolutePath,
416
+ title
417
+ } = test
418
+
419
+ const testInfo = {
420
+ testName: test.fullTitle(),
421
+ testSuiteAbsolutePath,
422
+ title,
423
+ testStartLine
424
+ }
425
+
326
426
  const asyncResource = getTestAsyncResource(test)
327
427
  if (asyncResource) {
328
428
  asyncResource.runInAsyncScope(() => {
329
- skipCh.publish(test)
429
+ skipCh.publish(testInfo)
330
430
  })
331
431
  } else {
332
432
  // if there is no async resource, the test has been skipped through `test.skip`
@@ -338,7 +438,7 @@ function mochaHook (Runner) {
338
438
  testToAr.set(test, skippedTestAsyncResource)
339
439
  }
340
440
  skippedTestAsyncResource.runInAsyncScope(() => {
341
- skipCh.publish(test)
441
+ skipCh.publish(testInfo)
342
442
  })
343
443
  }
344
444
  })
@@ -358,8 +458,8 @@ function mochaEachHook (mochaEach) {
358
458
  const [params] = arguments
359
459
  const { it, ...rest } = mochaEach.apply(this, arguments)
360
460
  return {
361
- it: function (name) {
362
- parameterizedTestCh.publish({ name, params })
461
+ it: function (title) {
462
+ parameterizedTestCh.publish({ title, params })
363
463
  it.apply(this, arguments)
364
464
  },
365
465
  ...rest
@@ -425,17 +525,43 @@ addHook({
425
525
  global.run()
426
526
  }
427
527
 
428
- const onReceivedConfiguration = ({ err }) => {
528
+ const onReceivedKnownTests = ({ err, knownTests: receivedKnownTests }) => {
429
529
  if (err) {
430
- return global.run()
530
+ knownTests = []
531
+ isEarlyFlakeDetectionEnabled = false
532
+ } else {
533
+ knownTests = receivedKnownTests
534
+ }
535
+
536
+ if (isSuitesSkippingEnabled) {
537
+ skippableSuitesCh.publish({
538
+ onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
539
+ })
540
+ } else {
541
+ global.run()
431
542
  }
432
- if (!skippableSuitesCh.hasSubscribers) {
543
+ }
544
+
545
+ const onReceivedConfiguration = ({ err, libraryConfig }) => {
546
+ if (err || !skippableSuitesCh.hasSubscribers || !knownTestsCh.hasSubscribers) {
433
547
  return global.run()
434
548
  }
435
549
 
436
- skippableSuitesCh.publish({
437
- onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
438
- })
550
+ isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
551
+ isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled
552
+ earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries
553
+
554
+ if (isEarlyFlakeDetectionEnabled) {
555
+ knownTestsCh.publish({
556
+ onDone: mochaRunAsyncResource.bind(onReceivedKnownTests)
557
+ })
558
+ } else if (isSuitesSkippingEnabled) {
559
+ skippableSuitesCh.publish({
560
+ onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
561
+ })
562
+ } else {
563
+ global.run()
564
+ }
439
565
  }
440
566
 
441
567
  mochaRunAsyncResource.runInAsyncScope(() => {
@@ -28,9 +28,12 @@ class AmqplibConsumerPlugin extends ConsumerPlugin {
28
28
  }
29
29
  })
30
30
 
31
- if (this.config.dsmEnabled && message) {
31
+ if (
32
+ this.config.dsmEnabled &&
33
+ message?.properties?.headers?.[CONTEXT_PROPAGATION_KEY]
34
+ ) {
32
35
  const payloadSize = getAmqpMessageSize({ headers: message.properties.headers, content: message.content })
33
- const queue = fields.queue ?? fields.routingKey
36
+ const queue = fields.queue ? fields.queue : fields.routingKey
34
37
  this.tracer.decodeDataStreamsContext(message.properties.headers[CONTEXT_PROPAGATION_KEY])
35
38
  this.tracer
36
39
  .setCheckpoint(['direction:in', `topic:${queue}`, 'type:rabbitmq'], span, payloadSize)
@@ -53,7 +53,7 @@ class Kinesis extends BaseAwsSdkPlugin {
53
53
 
54
54
  // extract DSM context after as we might not have a parent-child but may have a DSM context
55
55
  this.responseExtractDSMContext(
56
- request.operation, response, span ?? null, streamName
56
+ request.operation, response, span || null, streamName
57
57
  )
58
58
  }
59
59
  })
@@ -143,62 +143,72 @@ class Kinesis extends BaseAwsSdkPlugin {
143
143
  }
144
144
 
145
145
  requestInject (span, request) {
146
- const operation = request.operation
147
- if (operation === 'putRecord' || operation === 'putRecords') {
148
- if (!request.params) {
149
- return
150
- }
151
- const traceData = {}
152
-
153
- // inject data with DD context
154
- this.tracer.inject(span, 'text_map', traceData)
155
- let injectPath
156
- if (request.params.Records && request.params.Records.length > 0) {
157
- injectPath = request.params.Records[0]
158
- } else if (request.params.Data) {
159
- injectPath = request.params
160
- } else {
161
- log.error('No valid payload passed, unable to pass trace context')
146
+ const { operation, params } = request
147
+ if (!params) return
148
+
149
+ let stream
150
+ switch (operation) {
151
+ case 'putRecord':
152
+ stream = params.StreamArn ? params.StreamArn : (params.StreamName ? params.StreamName : '')
153
+ this.injectToMessage(span, params, stream, true)
154
+ break
155
+ case 'putRecords':
156
+ stream = params.StreamArn ? params.StreamArn : (params.StreamName ? params.StreamName : '')
157
+ for (let i = 0; i < params.Records.length; i++) {
158
+ this.injectToMessage(span, params.Records[i], stream, i === 0)
159
+ }
160
+ }
161
+ }
162
+
163
+ injectToMessage (span, params, stream, injectTraceContext) {
164
+ if (!params) {
165
+ return
166
+ }
167
+
168
+ let parsedData
169
+ if (injectTraceContext || this.config.dsmEnabled) {
170
+ parsedData = this._tryParse(params.Data)
171
+ if (!parsedData) {
172
+ log.error('Unable to parse payload, unable to pass trace context or set DSM checkpoint (if enabled)')
162
173
  return
163
174
  }
175
+ }
164
176
 
165
- const parsedData = this._tryParse(injectPath.Data)
166
- if (parsedData) {
167
- parsedData._datadog = traceData
168
-
169
- // set DSM hash if enabled
170
- if (this.config.dsmEnabled) {
171
- // get payload size of request data
172
- const payloadSize = Buffer.from(JSON.stringify(parsedData)).byteLength
173
- let stream
174
- // users can optionally use either stream name or stream arn
175
- if (request.params && request.params.StreamArn) {
176
- stream = request.params.StreamArn
177
- } else if (request.params && request.params.StreamName) {
178
- stream = request.params.StreamName
179
- }
180
- const dataStreamsContext = this.tracer
181
- .setCheckpoint(['direction:out', `topic:${stream}`, 'type:kinesis'], span, payloadSize)
182
- if (dataStreamsContext) {
183
- const pathwayCtx = encodePathwayContext(dataStreamsContext)
184
- parsedData._datadog[CONTEXT_PROPAGATION_KEY] = pathwayCtx.toJSON()
185
- }
186
- }
177
+ const ddInfo = {}
178
+ // for now, we only want to inject to the first message, this may change for batches in the future
179
+ if (injectTraceContext) { this.tracer.inject(span, 'text_map', ddInfo) }
180
+
181
+ // set DSM hash if enabled
182
+ if (this.config.dsmEnabled) {
183
+ parsedData._datadog = ddInfo
184
+ const dataStreamsContext = this.setDSMCheckpoint(span, parsedData, stream)
185
+ if (dataStreamsContext) {
186
+ const pathwayCtx = encodePathwayContext(dataStreamsContext)
187
+ ddInfo[CONTEXT_PROPAGATION_KEY] = pathwayCtx.toJSON()
188
+ }
189
+ }
187
190
 
188
- const finalData = Buffer.from(JSON.stringify(parsedData))
189
- const byteSize = finalData.length
190
- // Kinesis max payload size is 1MB
191
- // So we must ensure adding DD context won't go over that (512b is an estimate)
192
- if (byteSize >= 1048576) {
193
- log.info('Payload size too large to pass context')
194
- return
195
- }
196
- injectPath.Data = finalData
197
- } else {
198
- log.error('Unable to parse payload, unable to pass trace context')
191
+ if (Object.keys(ddInfo).length !== 0) {
192
+ parsedData._datadog = ddInfo
193
+ const finalData = Buffer.from(JSON.stringify(parsedData))
194
+ const byteSize = finalData.length
195
+ // Kinesis max payload size is 1MB
196
+ // So we must ensure adding DD context won't go over that (512b is an estimate)
197
+ if (byteSize >= 1048576) {
198
+ log.info('Payload size too large to pass context')
199
+ return
199
200
  }
201
+ params.Data = finalData
200
202
  }
201
203
  }
204
+
205
+ setDSMCheckpoint (span, parsedData, stream) {
206
+ // get payload size of request data
207
+ const payloadSize = Buffer.from(JSON.stringify(parsedData)).byteLength
208
+ const dataStreamsContext = this.tracer
209
+ .setCheckpoint(['direction:out', `topic:${stream}`, 'type:kinesis'], span, payloadSize)
210
+ return dataStreamsContext
211
+ }
202
212
  }
203
213
 
204
214
  module.exports = Kinesis