dd-trace 5.51.0 → 5.53.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/LICENSE-3rdparty.csv +0 -6
  2. package/README.md +5 -0
  3. package/index.d.ts +88 -6
  4. package/package.json +3 -9
  5. package/packages/datadog-instrumentations/src/amqplib.js +8 -5
  6. package/packages/datadog-instrumentations/src/child_process.js +2 -1
  7. package/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js +406 -0
  8. package/packages/datadog-instrumentations/src/couchbase.js +2 -1
  9. package/packages/datadog-instrumentations/src/cucumber.js +43 -45
  10. package/packages/datadog-instrumentations/src/dns.js +16 -14
  11. package/packages/datadog-instrumentations/src/express.js +2 -6
  12. package/packages/datadog-instrumentations/src/fs.js +43 -51
  13. package/packages/datadog-instrumentations/src/helpers/hooks.js +2 -0
  14. package/packages/datadog-instrumentations/src/helpers/register.js +17 -12
  15. package/packages/datadog-instrumentations/src/http/client.js +2 -1
  16. package/packages/datadog-instrumentations/src/iovalkey.js +51 -0
  17. package/packages/datadog-instrumentations/src/jest.js +53 -40
  18. package/packages/datadog-instrumentations/src/kafkajs.js +21 -8
  19. package/packages/datadog-instrumentations/src/mocha/main.js +33 -46
  20. package/packages/datadog-instrumentations/src/mocha/utils.js +76 -74
  21. package/packages/datadog-instrumentations/src/mysql2.js +3 -1
  22. package/packages/datadog-instrumentations/src/net.js +27 -29
  23. package/packages/datadog-instrumentations/src/next.js +6 -14
  24. package/packages/datadog-instrumentations/src/pg.js +15 -7
  25. package/packages/datadog-instrumentations/src/playwright.js +64 -67
  26. package/packages/datadog-instrumentations/src/url.js +9 -17
  27. package/packages/datadog-instrumentations/src/vitest.js +66 -72
  28. package/packages/datadog-plugin-confluentinc-kafka-javascript/src/batch-consumer.js +11 -0
  29. package/packages/datadog-plugin-confluentinc-kafka-javascript/src/consumer.js +11 -0
  30. package/packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js +19 -0
  31. package/packages/datadog-plugin-confluentinc-kafka-javascript/src/producer.js +11 -0
  32. package/packages/datadog-plugin-cucumber/src/index.js +32 -18
  33. package/packages/datadog-plugin-cypress/src/cypress-plugin.js +3 -0
  34. package/packages/datadog-plugin-dns/src/lookup.js +10 -5
  35. package/packages/datadog-plugin-dns/src/lookup_service.js +6 -2
  36. package/packages/datadog-plugin-dns/src/resolve.js +5 -2
  37. package/packages/datadog-plugin-dns/src/reverse.js +6 -2
  38. package/packages/datadog-plugin-fs/src/index.js +9 -2
  39. package/packages/datadog-plugin-iovalkey/src/index.js +18 -0
  40. package/packages/datadog-plugin-jest/src/index.js +17 -8
  41. package/packages/datadog-plugin-kafkajs/src/batch-consumer.js +2 -1
  42. package/packages/datadog-plugin-kafkajs/src/consumer.js +12 -21
  43. package/packages/datadog-plugin-kafkajs/src/producer.js +12 -5
  44. package/packages/datadog-plugin-kafkajs/src/utils.js +27 -0
  45. package/packages/datadog-plugin-langchain/src/index.js +0 -1
  46. package/packages/datadog-plugin-mocha/src/index.js +58 -35
  47. package/packages/datadog-plugin-net/src/ipc.js +6 -4
  48. package/packages/datadog-plugin-net/src/tcp.js +15 -9
  49. package/packages/datadog-plugin-pg/src/index.js +5 -1
  50. package/packages/datadog-plugin-playwright/src/index.js +29 -20
  51. package/packages/datadog-plugin-redis/src/index.js +8 -3
  52. package/packages/datadog-plugin-vitest/src/index.js +67 -44
  53. package/packages/datadog-shimmer/src/shimmer.js +164 -33
  54. package/packages/dd-trace/src/appsec/api_security_sampler.js +20 -12
  55. package/packages/dd-trace/src/appsec/graphql.js +2 -2
  56. package/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +14 -9
  57. package/packages/dd-trace/src/appsec/index.js +15 -12
  58. package/packages/dd-trace/src/appsec/rasp/index.js +4 -2
  59. package/packages/dd-trace/src/appsec/rasp/utils.js +11 -6
  60. package/packages/dd-trace/src/appsec/sdk/user_blocking.js +2 -2
  61. package/packages/dd-trace/src/appsec/telemetry/index.js +1 -2
  62. package/packages/dd-trace/src/appsec/telemetry/rasp.js +0 -9
  63. package/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +6 -6
  64. package/packages/dd-trace/src/baggage.js +36 -0
  65. package/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js +4 -2
  66. package/packages/dd-trace/src/config.js +14 -2
  67. package/packages/dd-trace/src/debugger/devtools_client/breakpoints.js +61 -7
  68. package/packages/dd-trace/src/debugger/devtools_client/index.js +10 -26
  69. package/packages/dd-trace/src/debugger/devtools_client/send.js +8 -7
  70. package/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js +15 -7
  71. package/packages/dd-trace/src/debugger/devtools_client/state.js +22 -2
  72. package/packages/dd-trace/src/dogstatsd.js +2 -0
  73. package/packages/dd-trace/src/exporters/common/docker.js +13 -31
  74. package/packages/dd-trace/src/guardrails/telemetry.js +2 -5
  75. package/packages/dd-trace/src/llmobs/tagger.js +3 -3
  76. package/packages/dd-trace/src/llmobs/writers/base.js +33 -12
  77. package/packages/dd-trace/src/noop/proxy.js +5 -0
  78. package/packages/dd-trace/src/opentelemetry/context_manager.js +2 -0
  79. package/packages/dd-trace/src/opentracing/propagation/text_map.js +17 -9
  80. package/packages/dd-trace/src/plugin_manager.js +2 -0
  81. package/packages/dd-trace/src/plugins/index.js +4 -0
  82. package/packages/dd-trace/src/plugins/log_plugin.js +9 -20
  83. package/packages/dd-trace/src/plugins/outbound.js +11 -3
  84. package/packages/dd-trace/src/plugins/tracing.js +8 -4
  85. package/packages/dd-trace/src/plugins/util/test.js +1 -1
  86. package/packages/dd-trace/src/profiling/exporter_cli.js +1 -1
  87. package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js +1 -1
  88. package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js +1 -1
  89. package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js +2 -2
  90. package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js +1 -1
  91. package/packages/dd-trace/src/profiling/profilers/event_plugins/event.js +15 -14
  92. package/packages/dd-trace/src/proxy.js +12 -4
  93. package/packages/dd-trace/src/serverless.js +0 -48
  94. package/packages/dd-trace/src/service-naming/schemas/v0/messaging.js +8 -0
  95. package/packages/dd-trace/src/service-naming/schemas/v0/storage.js +8 -0
  96. package/packages/dd-trace/src/service-naming/schemas/v1/messaging.js +8 -0
  97. package/packages/dd-trace/src/service-naming/schemas/v1/storage.js +4 -0
  98. package/packages/dd-trace/src/standalone/product.js +3 -5
@@ -0,0 +1,406 @@
1
+ 'use strict'
2
+
3
+ const {
4
+ addHook,
5
+ channel,
6
+ AsyncResource
7
+ } = require('./helpers/instrument')
8
+ const shimmer = require('../../datadog-shimmer')
9
+
10
+ const log = require('../../dd-trace/src/log')
11
+
12
+ // Create channels for Confluent Kafka JavaScript
13
+ const channels = {
14
+ producerStart: channel('apm:@confluentinc/kafka-javascript:produce:start'),
15
+ producerFinish: channel('apm:@confluentinc/kafka-javascript:produce:finish'),
16
+ producerError: channel('apm:@confluentinc/kafka-javascript:produce:error'),
17
+ producerCommit: channel('apm:@confluentinc/kafka-javascript:produce:commit'),
18
+ consumerStart: channel('apm:@confluentinc/kafka-javascript:consume:start'),
19
+ consumerFinish: channel('apm:@confluentinc/kafka-javascript:consume:finish'),
20
+ consumerError: channel('apm:@confluentinc/kafka-javascript:consume:error'),
21
+ consumerCommit: channel('apm:@confluentinc/kafka-javascript:consume:commit'),
22
+
23
+ // batch operations
24
+ batchConsumerStart: channel('apm:@confluentinc/kafka-javascript:consume-batch:start'),
25
+ batchConsumerFinish: channel('apm:@confluentinc/kafka-javascript:consume-batch:finish'),
26
+ batchConsumerError: channel('apm:@confluentinc/kafka-javascript:consume-batch:error'),
27
+ batchConsumerCommit: channel('apm:@confluentinc/kafka-javascript:consume-batch:commit')
28
+ }
29
+
30
+ const disabledHeaderWeakSet = new WeakSet()
31
+
32
+ // we need to store the offset per partition per topic for the consumer to track offsets for DSM
33
+ const latestConsumerOffsets = new Map()
34
+
35
+ // Customize the instrumentation for Confluent Kafka JavaScript
36
+ addHook({ name: '@confluentinc/kafka-javascript', versions: ['>=1.0.0'] }, (module) => {
37
+ // Hook native module classes first
38
+ instrumentBaseModule(module)
39
+
40
+ // Then hook KafkaJS if it exists
41
+ if (module.KafkaJS) {
42
+ instrumentKafkaJS(module.KafkaJS)
43
+ }
44
+
45
+ return module
46
+ })
47
+
48
+ function instrumentBaseModule (module) {
49
+ // Helper function to wrap producer classes
50
+ function wrapProducerClass (ProducerClass, className) {
51
+ return shimmer.wrap(module, className, function wrapProducer (Original) {
52
+ return function wrappedProducer () {
53
+ const producer = new Original(...arguments)
54
+
55
+ // Hook the produce method
56
+ if (typeof producer?.produce === 'function') {
57
+ shimmer.wrap(producer, 'produce', function wrapProduce (produce) {
58
+ return function wrappedProduce (topic, partition, message, key, timestamp, opaque) {
59
+ if (!channels.producerStart.hasSubscribers) {
60
+ return produce.apply(this, arguments)
61
+ }
62
+
63
+ const brokers = this.globalConfig?.['bootstrap.servers']
64
+
65
+ const asyncResource = new AsyncResource('bound-anonymous-fn')
66
+ return asyncResource.runInAsyncScope(() => {
67
+ try {
68
+ channels.producerStart.publish({
69
+ topic,
70
+ messages: [{ key, value: message }],
71
+ bootstrapServers: brokers
72
+ })
73
+
74
+ const result = produce.apply(this, arguments)
75
+
76
+ channels.producerCommit.publish(undefined)
77
+ channels.producerFinish.publish(undefined)
78
+ return result
79
+ } catch (error) {
80
+ channels.producerError.publish(error)
81
+ channels.producerFinish.publish(undefined)
82
+ throw error
83
+ }
84
+ })
85
+ }
86
+ })
87
+ }
88
+
89
+ return producer
90
+ }
91
+ })
92
+ }
93
+
94
+ // Helper function to wrap consumer classes
95
+ function wrapConsumerClass (ConsumerClass, className) {
96
+ return shimmer.wrap(module, className, function wrapConsumer (Original) {
97
+ return function wrappedConsumer () {
98
+ const consumer = new Original(...arguments)
99
+ const groupId = this.groupId || (arguments[0]?.['group.id'])
100
+
101
+ // Wrap the consume method
102
+ if (typeof consumer?.consume === 'function') {
103
+ shimmer.wrap(consumer, 'consume', function wrapConsume (consume) {
104
+ return function wrappedConsume (numMessages, callback) {
105
+ if (!channels.consumerStart.hasSubscribers) {
106
+ return consume.apply(this, arguments)
107
+ }
108
+
109
+ if (!callback && typeof numMessages === 'function') {
110
+ callback = numMessages
111
+ }
112
+
113
+ // Handle callback-based consumption
114
+ if (typeof callback === 'function') {
115
+ return consume.call(this, numMessages, function wrappedCallback (err, messages) {
116
+ if (messages && messages.length > 0) {
117
+ messages.forEach(message => {
118
+ channels.consumerStart.publish({
119
+ topic: message?.topic,
120
+ partition: message?.partition,
121
+ message,
122
+ groupId
123
+ })
124
+ updateLatestOffset(message?.topic, message?.partition, message?.offset, groupId)
125
+ })
126
+ }
127
+
128
+ if (err) {
129
+ channels.consumerError.publish(err)
130
+ }
131
+
132
+ try {
133
+ const result = callback.apply(this, arguments)
134
+ channels.consumerFinish.publish(undefined)
135
+ return result
136
+ } catch (error) {
137
+ channels.consumerError.publish(error)
138
+ channels.consumerFinish.publish(undefined)
139
+ throw error
140
+ }
141
+ })
142
+ }
143
+
144
+ // If no callback is provided, just pass through
145
+ return consume.apply(this, arguments)
146
+ }
147
+ })
148
+
149
+ // Wrap the commit method for handling offset commits
150
+ if (consumer && typeof consumer.commit === 'function') {
151
+ shimmer.wrap(consumer, 'commit', wrapCommit)
152
+ }
153
+ }
154
+
155
+ return consumer
156
+ }
157
+ })
158
+ }
159
+
160
+ // Wrap Producer and KafkaProducer classes if they exist
161
+ if (typeof module.Producer === 'function') {
162
+ wrapProducerClass(module.Producer, 'Producer')
163
+ }
164
+ if (typeof module.KafkaProducer === 'function') {
165
+ wrapProducerClass(module.KafkaProducer, 'KafkaProducer')
166
+ }
167
+
168
+ // Wrap Consumer and KafkaConsumer classes if they exist
169
+ if (typeof module.Consumer === 'function') {
170
+ wrapConsumerClass(module.Consumer, 'Consumer')
171
+ }
172
+ if (typeof module.KafkaConsumer === 'function') {
173
+ wrapConsumerClass(module.KafkaConsumer, 'KafkaConsumer')
174
+ }
175
+ }
176
+
177
+ function instrumentKafkaJS (kafkaJS) {
178
+ // Hook the Kafka class if it exists
179
+ if (typeof kafkaJS?.Kafka === 'function') {
180
+ shimmer.wrap(kafkaJS, 'Kafka', function wrapKafka (OriginalKafka) {
181
+ return function KafkaWrapper (options) {
182
+ const kafka = new OriginalKafka(options)
183
+ const kafkaJSOptions = options?.kafkaJS || options
184
+ const brokers = kafkaJSOptions?.brokers ? kafkaJSOptions.brokers.join(',') : ''
185
+
186
+ // Store brokers for later use
187
+ kafka._ddBrokers = brokers
188
+
189
+ // Wrap the producer method if it exists
190
+ if (typeof kafka?.producer === 'function') {
191
+ shimmer.wrap(kafka, 'producer', function wrapProducerMethod (producerMethod) {
192
+ return function wrappedProducerMethod () {
193
+ const producer = producerMethod.apply(this, arguments)
194
+
195
+ if (!brokers && arguments?.[0]?.['bootstrap.servers']) {
196
+ kafka._ddBrokers = arguments[0]['bootstrap.servers']
197
+ }
198
+
199
+ // Wrap the send method of the producer
200
+ if (producer && typeof producer.send === 'function') {
201
+ shimmer.wrap(producer, 'send', function wrapSend (send) {
202
+ return function wrappedSend (payload) {
203
+ if (!channels.producerStart.hasSubscribers) {
204
+ return send.apply(this, arguments)
205
+ }
206
+
207
+ const asyncResource = new AsyncResource('bound-anonymous-fn')
208
+ return asyncResource.runInAsyncScope(() => {
209
+ try {
210
+ channels.producerStart.publish({
211
+ topic: payload?.topic,
212
+ messages: payload?.messages || [],
213
+ bootstrapServers: kafka._ddBrokers,
214
+ disableHeaderInjection: disabledHeaderWeakSet.has(producer)
215
+ })
216
+
217
+ const result = send.apply(this, arguments)
218
+
219
+ result.then(
220
+ asyncResource.bind(res => {
221
+ channels.producerCommit.publish(res)
222
+ channels.producerFinish.publish(undefined)
223
+ }),
224
+ asyncResource.bind(err => {
225
+ if (err) {
226
+ // Fixes bug where we would inject message headers for kafka brokers
227
+ // that don't support headers (version <0.11). On the error, we disable
228
+ // header injection. Unfortunately the error name / type is not more specific.
229
+ // This approach is implemented by other tracers as well.
230
+ if (err.name === 'KafkaJSError' && err.type === 'ERR_UNKNOWN') {
231
+ disabledHeaderWeakSet.add(producer)
232
+ log.error('Kafka Broker responded with UNKNOWN_SERVER_ERROR (-1). ' +
233
+ 'Please look at broker logs for more information. ' +
234
+ 'Tracer message header injection for Kafka is disabled.')
235
+ }
236
+ channels.producerError.publish(err)
237
+ }
238
+ channels.producerFinish.publish(undefined)
239
+ })
240
+ )
241
+
242
+ return result
243
+ } catch (e) {
244
+ channels.producerError.publish(e)
245
+ channels.producerFinish.publish(undefined)
246
+ throw e
247
+ }
248
+ })
249
+ }
250
+ })
251
+ }
252
+
253
+ return producer
254
+ }
255
+ })
256
+ }
257
+
258
+ // Wrap the consumer method if it exists
259
+ if (typeof kafka?.consumer === 'function') {
260
+ shimmer.wrap(kafka, 'consumer', function wrapConsumerMethod (consumerMethod) {
261
+ return function wrappedConsumerMethod (config) {
262
+ const consumer = consumerMethod.apply(this, arguments)
263
+ const groupId = getGroupId(config)
264
+
265
+ // Wrap the run method for handling message consumption
266
+ if (typeof consumer?.run === 'function') {
267
+ shimmer.wrap(consumer, 'run', function wrapRun (run) {
268
+ return function wrappedRun (options) {
269
+ if (!channels.consumerStart.hasSubscribers) {
270
+ return run.apply(this, arguments)
271
+ }
272
+
273
+ const eachMessage = options?.eachMessage
274
+ const eachBatch = options?.eachBatch
275
+ if (eachMessage) {
276
+ options.eachMessage = wrapKafkaCallback(
277
+ eachMessage,
278
+ {
279
+ startCh: channels.consumerStart,
280
+ commitCh: channels.consumerCommit,
281
+ finishCh: channels.consumerFinish,
282
+ errorCh: channels.consumerError
283
+ },
284
+ (payload) => {
285
+ return {
286
+ topic: payload?.topic,
287
+ partition: payload?.partition,
288
+ offset: payload?.message?.offset,
289
+ message: payload?.message,
290
+ groupId
291
+ }
292
+ })
293
+ } else if (eachBatch) {
294
+ options.eachBatch = wrapKafkaCallback(
295
+ eachBatch,
296
+ {
297
+ startCh: channels.batchConsumerStart,
298
+ commitCh: channels.batchConsumerCommit,
299
+ finishCh: channels.batchConsumerFinish,
300
+ errorCh: channels.batchConsumerError
301
+ },
302
+ (payload) => {
303
+ const { batch } = payload
304
+ return {
305
+ topic: batch?.topic,
306
+ partition: batch?.partition,
307
+ offset: batch?.messages[batch?.messages?.length - 1]?.offset,
308
+ messages: batch?.messages,
309
+ groupId
310
+ }
311
+ }
312
+ )
313
+ }
314
+
315
+ return run.apply(this, arguments)
316
+ }
317
+ })
318
+ }
319
+
320
+ // Wrap the commit method for handling offset commits
321
+ if (typeof consumer?.commitOffsets === 'function') {
322
+ shimmer.wrap(consumer, 'commitOffsets', wrapCommit)
323
+ }
324
+
325
+ return consumer
326
+ }
327
+ })
328
+ }
329
+
330
+ return kafka
331
+ }
332
+ })
333
+ }
334
+ }
335
+
336
+ function wrapCommit (commit) {
337
+ return function wrappedCommit (options) {
338
+ if (!channels.consumerCommit.hasSubscribers) {
339
+ return commit.apply(this, arguments)
340
+ }
341
+
342
+ const result = commit.apply(this, arguments)
343
+ channels.consumerCommit.publish(getLatestOffsets())
344
+ latestConsumerOffsets.clear()
345
+ return result
346
+ }
347
+ }
348
+
349
+ function wrapKafkaCallback (callback, { startCh, commitCh, finishCh, errorCh }, getPayload) {
350
+ return function wrappedKafkaCallback (payload) {
351
+ const commitPayload = getPayload(payload)
352
+
353
+ const asyncResource = new AsyncResource('bound-anonymous-fn')
354
+ return asyncResource.runInAsyncScope(() => {
355
+ startCh.publish(commitPayload)
356
+
357
+ updateLatestOffset(commitPayload?.topic, commitPayload?.partition, commitPayload?.offset, commitPayload?.groupId)
358
+
359
+ try {
360
+ const result = callback.apply(this, arguments)
361
+
362
+ if (result && typeof result.then === 'function') {
363
+ return result
364
+ .then(asyncResource.bind(res => {
365
+ finishCh.publish(undefined)
366
+ return res
367
+ }))
368
+ .catch(asyncResource.bind(err => {
369
+ errorCh.publish(err)
370
+ finishCh.publish(undefined)
371
+ throw err
372
+ }))
373
+ } else {
374
+ finishCh.publish(undefined)
375
+ return result
376
+ }
377
+ } catch (error) {
378
+ errorCh.publish(error)
379
+ finishCh.publish(undefined)
380
+ throw error
381
+ }
382
+ })
383
+ }
384
+ }
385
+
386
+ function getGroupId (config) {
387
+ if (!config) return ''
388
+ if (config.kafkaJS?.groupId) return config.kafkaJS.groupId
389
+ if (config?.groupId) return config.groupId
390
+ if (config['group.id']) return config['group.id']
391
+ return ''
392
+ }
393
+
394
+ function updateLatestOffset (topic, partition, offset, groupId) {
395
+ const key = `${topic}:${partition}`
396
+ latestConsumerOffsets.set(key, {
397
+ topic,
398
+ partition,
399
+ offset,
400
+ groupId
401
+ })
402
+ }
403
+
404
+ function getLatestOffsets () {
405
+ return Array.from(latestConsumerOffsets.values())
406
+ }
@@ -1,5 +1,6 @@
1
1
  'use strict'
2
2
 
3
+ const { errorMonitor } = require('events')
3
4
  const {
4
5
  channel,
5
6
  addHook,
@@ -186,7 +187,7 @@ addHook({ name: 'couchbase', file: 'lib/bucket.js', versions: ['^2.6.12'] }, Buc
186
187
  finishCh.publish(undefined)
187
188
  }))
188
189
 
189
- emitter.once('error', asyncResource.bind((error) => {
190
+ emitter.once(errorMonitor, asyncResource.bind((error) => {
190
191
  errorCh.publish(error)
191
192
  finishCh.publish(undefined)
192
193
  }))
@@ -8,6 +8,7 @@ const log = require('../../dd-trace/src/log')
8
8
  const testStartCh = channel('ci:cucumber:test:start')
9
9
  const testRetryCh = channel('ci:cucumber:test:retry')
10
10
  const testFinishCh = channel('ci:cucumber:test:finish') // used for test steps too
11
+ const testFnCh = channel('ci:cucumber:test:fn')
11
12
 
12
13
  const testStepStartCh = channel('ci:cucumber:test-step:start')
13
14
 
@@ -52,7 +53,7 @@ const patched = new WeakSet()
52
53
 
53
54
  const lastStatusByPickleId = new Map()
54
55
  const numRetriesByPickleId = new Map()
55
- const numAttemptToAsyncResource = new Map()
56
+ const numAttemptToCtx = new Map()
56
57
  const newTestsByTestFullname = new Map()
57
58
 
58
59
  let eventDataCollector = null
@@ -227,16 +228,12 @@ function wrapRun (pl, isLatestVersion) {
227
228
  patched.add(pl)
228
229
 
229
230
  shimmer.wrap(pl.prototype, 'run', run => function () {
230
- if (!testStartCh.hasSubscribers) {
231
+ if (!testFinishCh.hasSubscribers) {
231
232
  return run.apply(this, arguments)
232
233
  }
233
234
 
234
235
  let numAttempt = 0
235
236
 
236
- const asyncResource = new AsyncResource('bound-anonymous-fn')
237
-
238
- numAttemptToAsyncResource.set(numAttempt, asyncResource)
239
-
240
237
  const testFileAbsolutePath = this.pickle.uri
241
238
 
242
239
  const testSourceLine = this.gherkinDocument?.feature?.location?.line
@@ -247,9 +244,9 @@ function wrapRun (pl, isLatestVersion) {
247
244
  testSourceLine,
248
245
  isParallel: !!process.env.CUCUMBER_WORKER_ID
249
246
  }
250
- asyncResource.runInAsyncScope(() => {
251
- testStartCh.publish(testStartPayload)
252
- })
247
+ const ctx = testStartPayload
248
+ numAttemptToCtx.set(numAttempt, ctx)
249
+ testStartCh.runStores(ctx, () => { })
253
250
  const promises = {}
254
251
  try {
255
252
  this.eventBroadcaster.on('envelope', shimmer.wrapFunction(null, () => async (testCase) => {
@@ -265,7 +262,7 @@ function wrapRun (pl, isLatestVersion) {
265
262
  // ignore error
266
263
  }
267
264
 
268
- const failedAttemptAsyncResource = numAttemptToAsyncResource.get(numAttempt)
265
+ const failedAttemptCtx = numAttemptToCtx.get(numAttempt)
269
266
  const isFirstAttempt = numAttempt++ === 0
270
267
  const isAtrRetry = !isFirstAttempt && isFlakyTestRetriesEnabled
271
268
 
@@ -273,23 +270,19 @@ function wrapRun (pl, isLatestVersion) {
273
270
  await promises.hitBreakpointPromise
274
271
  }
275
272
 
276
- failedAttemptAsyncResource.runInAsyncScope(() => {
277
- // the current span will be finished and a new one will be created
278
- testRetryCh.publish({ isFirstAttempt, error, isAtrRetry })
279
- })
273
+ // the current span will be finished and a new one will be created
274
+ testRetryCh.publish({ isFirstAttempt, error, isAtrRetry, ...failedAttemptCtx.currentStore })
280
275
 
281
- const newAsyncResource = new AsyncResource('bound-anonymous-fn')
282
- numAttemptToAsyncResource.set(numAttempt, newAsyncResource)
276
+ const newCtx = { ...testStartPayload, promises }
277
+ numAttemptToCtx.set(numAttempt, newCtx)
283
278
 
284
- newAsyncResource.runInAsyncScope(() => {
285
- testStartCh.publish({ ...testStartPayload, promises }) // a new span will be created
286
- })
279
+ testStartCh.runStores(newCtx, () => { })
287
280
  }
288
281
  }
289
282
  }))
290
283
  let promise
291
284
 
292
- asyncResource.runInAsyncScope(() => {
285
+ testFnCh.runStores(ctx, () => {
293
286
  promise = run.apply(this, arguments)
294
287
  })
295
288
  promise.finally(async () => {
@@ -309,6 +302,7 @@ function wrapRun (pl, isLatestVersion) {
309
302
  let isAttemptToFixRetry = false
310
303
  let hasFailedAllRetries = false
311
304
  let hasPassedAllRetries = false
305
+ let hasFailedAttemptToFix = false
312
306
  let isDisabled = false
313
307
  let isQuarantined = false
314
308
 
@@ -330,6 +324,7 @@ function wrapRun (pl, isLatestVersion) {
330
324
  }, { pass: 0, fail: 0 })
331
325
  hasFailedAllRetries = fail === testManagementAttemptToFixRetries + 1
332
326
  hasPassedAllRetries = pass === testManagementAttemptToFixRetries + 1
327
+ hasFailedAttemptToFix = fail > 0
333
328
  }
334
329
  }
335
330
  }
@@ -341,38 +336,40 @@ function wrapRun (pl, isLatestVersion) {
341
336
  isEfdRetry = numRetries > 0
342
337
  }
343
338
 
344
- const attemptAsyncResource = numAttemptToAsyncResource.get(numAttempt)
339
+ const attemptCtx = numAttemptToCtx.get(numAttempt)
345
340
 
346
341
  const error = getErrorFromCucumberResult(result)
347
342
 
348
343
  if (promises.hitBreakpointPromise) {
349
344
  await promises.hitBreakpointPromise
350
345
  }
351
- attemptAsyncResource.runInAsyncScope(() => {
352
- testFinishCh.publish({
353
- status,
354
- skipReason,
355
- error,
356
- isNew,
357
- isEfdRetry,
358
- isFlakyRetry: numAttempt > 0,
359
- isAttemptToFix,
360
- isAttemptToFixRetry,
361
- hasFailedAllRetries,
362
- hasPassedAllRetries,
363
- isDisabled,
364
- isQuarantined
365
- })
346
+ testFinishCh.publish({
347
+ status,
348
+ skipReason,
349
+ error,
350
+ isNew,
351
+ isEfdRetry,
352
+ isFlakyRetry: numAttempt > 0,
353
+ isAttemptToFix,
354
+ isAttemptToFixRetry,
355
+ hasFailedAllRetries,
356
+ hasPassedAllRetries,
357
+ hasFailedAttemptToFix,
358
+ isDisabled,
359
+ isQuarantined,
360
+ ...attemptCtx.currentStore
366
361
  })
367
362
  })
368
363
  return promise
369
364
  } catch (err) {
370
- errorCh.publish(err)
371
- throw err
365
+ ctx.err = err
366
+ errorCh.runStores(ctx, () => {
367
+ throw err
368
+ })
372
369
  }
373
370
  })
374
371
  shimmer.wrap(pl.prototype, 'runStep', runStep => function () {
375
- if (!testStepStartCh.hasSubscribers) {
372
+ if (!testFinishCh.hasSubscribers) {
376
373
  return runStep.apply(this, arguments)
377
374
  }
378
375
  const testStep = arguments[0]
@@ -384,9 +381,8 @@ function wrapRun (pl, isLatestVersion) {
384
381
  resource = testStep.isHook ? 'hook' : testStep.pickleStep.text
385
382
  }
386
383
 
387
- const asyncResource = new AsyncResource('bound-anonymous-fn')
388
- return asyncResource.runInAsyncScope(() => {
389
- testStepStartCh.publish({ resource })
384
+ const ctx = { resource }
385
+ return testStepStartCh.runStores(ctx, () => {
390
386
  try {
391
387
  const promise = runStep.apply(this, arguments)
392
388
 
@@ -395,12 +391,14 @@ function wrapRun (pl, isLatestVersion) {
395
391
  ? getStatusFromResultLatest(result)
396
392
  : getStatusFromResult(result)
397
393
 
398
- testFinishCh.publish({ isStep: true, status, skipReason, errorMessage })
394
+ testFinishCh.publish({ isStep: true, status, skipReason, errorMessage, ...ctx.currentStore })
399
395
  })
400
396
  return promise
401
397
  } catch (err) {
402
- errorCh.publish(err)
403
- throw err
398
+ ctx.err = err
399
+ errorCh.runStores(ctx, () => {
400
+ throw err
401
+ })
404
402
  }
405
403
  })
406
404
  })
@@ -1,6 +1,6 @@
1
1
  'use strict'
2
2
 
3
- const { channel, addHook, AsyncResource } = require('./helpers/instrument')
3
+ const { channel, addHook } = require('./helpers/instrument')
4
4
  const shimmer = require('../../datadog-shimmer')
5
5
 
6
6
  const rrtypes = {
@@ -53,7 +53,7 @@ function wrap (prefix, fn, expectedArgs, rrtype) {
53
53
  const errorCh = channel(prefix + ':error')
54
54
 
55
55
  const wrapped = function () {
56
- const cb = AsyncResource.bind(arguments[arguments.length - 1])
56
+ const cb = arguments[arguments.length - 1]
57
57
  if (
58
58
  !startCh.hasSubscribers ||
59
59
  arguments.length < expectedArgs ||
@@ -62,30 +62,32 @@ function wrap (prefix, fn, expectedArgs, rrtype) {
62
62
  return fn.apply(this, arguments)
63
63
  }
64
64
 
65
- const startArgs = Array.from(arguments)
66
- startArgs.pop() // gets rid of the callback
65
+ const args = Array.from(arguments)
66
+ args.pop() // gets rid of the callback
67
67
  if (rrtype) {
68
- startArgs.push(rrtype)
68
+ args.push(rrtype)
69
69
  }
70
70
 
71
- const asyncResource = new AsyncResource('bound-anonymous-fn')
72
- return asyncResource.runInAsyncScope(() => {
73
- startCh.publish(startArgs)
71
+ const ctx = { args }
74
72
 
75
- arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (error, result) {
73
+ return startCh.runStores(ctx, () => {
74
+ arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => function (error, result, ...args) {
76
75
  if (error) {
77
- errorCh.publish(error)
76
+ ctx.error = error
77
+ errorCh.publish(ctx)
78
78
  }
79
- finishCh.publish(result)
80
- cb.apply(this, arguments)
81
- }))
79
+
80
+ ctx.result = result
81
+ finishCh.runStores(ctx, cb, this, error, result, ...args)
82
+ })
82
83
 
83
84
  try {
84
85
  return fn.apply(this, arguments)
85
86
  // TODO deal with promise versions when we support `dns/promises`
86
87
  } catch (error) {
87
88
  error.stack // trigger getting the stack at the original throwing point
88
- errorCh.publish(error)
89
+ ctx.error = error
90
+ errorCh.publish(ctx)
89
91
 
90
92
  throw error
91
93
  }