dd-trace 5.88.0 → 5.90.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103):
  1. package/LICENSE-3rdparty.csv +0 -3
  2. package/ext/tags.js +2 -0
  3. package/index.d.ts +40 -0
  4. package/package.json +18 -14
  5. package/packages/datadog-instrumentations/src/azure-durable-functions.js +75 -0
  6. package/packages/datadog-instrumentations/src/cucumber.js +40 -1
  7. package/packages/datadog-instrumentations/src/elasticsearch.js +12 -3
  8. package/packages/datadog-instrumentations/src/helpers/hooks.js +1 -0
  9. package/packages/datadog-instrumentations/src/helpers/rewriter/index.js +26 -111
  10. package/packages/datadog-instrumentations/src/helpers/rewriter/{compiler.js → orchestrion/compiler.js} +5 -5
  11. package/packages/datadog-instrumentations/src/helpers/rewriter/orchestrion/index.js +43 -0
  12. package/packages/datadog-instrumentations/src/helpers/rewriter/orchestrion/matcher.js +49 -0
  13. package/packages/datadog-instrumentations/src/helpers/rewriter/orchestrion/transformer.js +121 -0
  14. package/packages/datadog-instrumentations/src/helpers/rewriter/{transforms.js → orchestrion/transforms.js} +6 -6
  15. package/packages/datadog-instrumentations/src/jest.js +123 -43
  16. package/packages/datadog-instrumentations/src/mocha/main.js +10 -4
  17. package/packages/datadog-instrumentations/src/mocha/utils.js +6 -0
  18. package/packages/datadog-instrumentations/src/mocha/worker.js +10 -2
  19. package/packages/datadog-instrumentations/src/playwright.js +20 -2
  20. package/packages/datadog-instrumentations/src/prisma.js +4 -2
  21. package/packages/datadog-instrumentations/src/vitest.js +16 -0
  22. package/packages/datadog-plugin-apollo/src/gateway/execute.js +8 -0
  23. package/packages/datadog-plugin-apollo/src/gateway/fetch.js +5 -0
  24. package/packages/datadog-plugin-apollo/src/gateway/plan.js +8 -0
  25. package/packages/datadog-plugin-apollo/src/gateway/postprocessing.js +5 -0
  26. package/packages/datadog-plugin-apollo/src/gateway/request.js +4 -3
  27. package/packages/datadog-plugin-apollo/src/gateway/validate.js +4 -3
  28. package/packages/datadog-plugin-apollo/src/index.js +28 -0
  29. package/packages/datadog-plugin-azure-durable-functions/src/index.js +49 -0
  30. package/packages/datadog-plugin-cypress/src/cypress-plugin.js +47 -6
  31. package/packages/datadog-plugin-cypress/src/source-map-utils.js +297 -0
  32. package/packages/datadog-plugin-cypress/src/support.js +4 -1
  33. package/packages/datadog-plugin-jest/src/index.js +6 -0
  34. package/packages/datadog-plugin-playwright/src/index.js +35 -8
  35. package/packages/dd-trace/src/aiguard/noop.js +1 -1
  36. package/packages/dd-trace/src/aiguard/sdk.js +18 -5
  37. package/packages/dd-trace/src/appsec/api_security_sampler.js +22 -1
  38. package/packages/dd-trace/src/appsec/index.js +11 -1
  39. package/packages/dd-trace/src/appsec/reporter.js +28 -11
  40. package/packages/dd-trace/src/appsec/waf/index.js +1 -1
  41. package/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +4 -4
  42. package/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +1 -0
  43. package/packages/dd-trace/src/config/index.js +3 -0
  44. package/packages/dd-trace/src/config/supported-configurations.json +17 -0
  45. package/packages/dd-trace/src/constants.js +1 -0
  46. package/packages/dd-trace/src/datastreams/checkpointer.js +13 -0
  47. package/packages/dd-trace/src/datastreams/index.js +3 -0
  48. package/packages/dd-trace/src/datastreams/manager.js +9 -0
  49. package/packages/dd-trace/src/datastreams/processor.js +126 -3
  50. package/packages/dd-trace/src/encode/agentless-ci-visibility.js +1 -8
  51. package/packages/dd-trace/src/encode/agentless-json.js +82 -23
  52. package/packages/dd-trace/src/exporters/agent/writer.js +7 -8
  53. package/packages/dd-trace/src/exporters/agentless/index.js +58 -15
  54. package/packages/dd-trace/src/exporters/agentless/writer.js +35 -18
  55. package/packages/dd-trace/src/llmobs/constants/tags.js +2 -0
  56. package/packages/dd-trace/src/llmobs/plugins/anthropic.js +9 -0
  57. package/packages/dd-trace/src/llmobs/tagger.js +8 -0
  58. package/packages/dd-trace/src/opentracing/propagation/text_map.js +1 -0
  59. package/packages/dd-trace/src/pkg.js +1 -1
  60. package/packages/dd-trace/src/plugins/apollo.js +7 -2
  61. package/packages/dd-trace/src/plugins/index.js +1 -0
  62. package/packages/dd-trace/src/plugins/util/ci.js +95 -3
  63. package/packages/dd-trace/src/plugins/util/inferred_proxy.js +36 -2
  64. package/packages/dd-trace/src/plugins/util/web.js +31 -11
  65. package/packages/dd-trace/src/proxy.js +2 -1
  66. package/packages/dd-trace/src/runtime_metrics/runtime_metrics.js +7 -0
  67. package/packages/dd-trace/src/service-naming/schemas/v0/serverless.js +4 -0
  68. package/packages/dd-trace/src/service-naming/schemas/v1/serverless.js +4 -0
  69. package/packages/dd-trace/src/standalone/product.js +2 -1
  70. package/packages/dd-trace/src/startup-log.js +52 -18
  71. package/vendor/dist/@datadog/sketches-js/index.js +1 -1
  72. package/vendor/dist/@datadog/source-map/index.js +1 -1
  73. package/vendor/dist/@isaacs/ttlcache/index.js +1 -1
  74. package/vendor/dist/@opentelemetry/core/index.js +1 -1
  75. package/vendor/dist/@opentelemetry/resources/index.js +1 -1
  76. package/vendor/dist/astring/index.js +1 -1
  77. package/vendor/dist/crypto-randomuuid/index.js +1 -1
  78. package/vendor/dist/escape-string-regexp/index.js +1 -1
  79. package/vendor/dist/esquery/index.js +1 -1
  80. package/vendor/dist/ignore/index.js +1 -1
  81. package/vendor/dist/istanbul-lib-coverage/index.js +1 -1
  82. package/vendor/dist/jest-docblock/index.js +1 -1
  83. package/vendor/dist/jsonpath-plus/index.js +1 -1
  84. package/vendor/dist/limiter/index.js +1 -1
  85. package/vendor/dist/lodash.sortby/index.js +1 -1
  86. package/vendor/dist/lru-cache/index.js +1 -1
  87. package/vendor/dist/meriyah/index.js +1 -1
  88. package/vendor/dist/module-details-from-path/index.js +1 -1
  89. package/vendor/dist/mutexify/promise/index.js +1 -1
  90. package/vendor/dist/opentracing/index.js +1 -1
  91. package/vendor/dist/path-to-regexp/index.js +1 -1
  92. package/vendor/dist/pprof-format/index.js +1 -1
  93. package/vendor/dist/protobufjs/index.js +1 -1
  94. package/vendor/dist/protobufjs/minimal/index.js +1 -1
  95. package/vendor/dist/retry/index.js +1 -1
  96. package/vendor/dist/rfdc/index.js +1 -1
  97. package/vendor/dist/semifies/index.js +1 -1
  98. package/vendor/dist/shell-quote/index.js +1 -1
  99. package/vendor/dist/source-map/index.js +1 -1
  100. package/vendor/dist/source-map/lib/util/index.js +1 -1
  101. package/vendor/dist/tlhunter-sorted-set/index.js +1 -1
  102. package/vendor/dist/ttl-set/index.js +1 -1
  103. package/packages/datadog-instrumentations/src/helpers/rewriter/transformer.js +0 -21
@@ -61,7 +61,6 @@
61
61
  "crypto-randomuuid","npm:crypto-randomuuid","['MIT']","['Stephen Belanger']"
62
62
  "dc-polyfill","https://github.com/DataDog/dc-polyfill","['MIT']","['Thomas Hunter II']"
63
63
  "dd-trace","https://github.com/DataDog/dd-trace-js","['(Apache-2.0 OR BSD-3-Clause)']","['Datadog Inc. <info@datadoghq.com>']"
64
- "delay","https://github.com/sindresorhus/delay","['MIT']","['Sindre Sorhus']"
65
64
  "detect-newline","https://github.com/sindresorhus/detect-newline","['MIT']","['Sindre Sorhus']"
66
65
  "escape-string-regexp","https://github.com/sindresorhus/escape-string-regexp","['MIT']","['Sindre Sorhus']"
67
66
  "esquery","https://github.com/estools/esquery","['BSD-3-Clause']","['Joel Feenstra']"
@@ -85,7 +84,6 @@
85
84
  "node-gyp-build","https://github.com/prebuild/node-gyp-build","['MIT']","['Mathias Buus']"
86
85
  "opentracing","https://github.com/opentracing/opentracing-javascript","['Apache-2.0']","['opentracing']"
87
86
  "oxc-parser","https://github.com/oxc-project/oxc","['MIT']","['Boshen and oxc contributors']"
88
- "p-limit","https://github.com/sindresorhus/p-limit","['MIT']","['Sindre Sorhus']"
89
87
  "path-to-regexp","https://github.com/pillarjs/path-to-regexp","['MIT']","['pillarjs']"
90
88
  "pprof-format","https://github.com/DataDog/pprof-format","['MIT']","['Datadog Inc.']"
91
89
  "protobufjs","https://github.com/protobufjs/protobuf.js","['BSD-3-Clause']","['Daniel Wirtz']"
@@ -100,6 +98,5 @@
100
98
  "tslib","https://github.com/microsoft/tslib","['0BSD']","['Microsoft Corp.']"
101
99
  "ttl-set","https://github.com/watson/ttl-set","['MIT']","['Thomas Watson']"
102
100
  "undici-types","https://github.com/nodejs/undici","['MIT']","['nodejs']"
103
- "yocto-queue","https://github.com/sindresorhus/yocto-queue","['MIT']","['Sindre Sorhus']"
104
101
  "aws-lambda-nodejs-runtime-interface-client","https://github.com/aws/aws-lambda-nodejs-runtime-interface-client/blob/v2.1.0/src/utils/UserFunction.ts","['Apache-2.0']","['Amazon.com Inc. or its affiliates']"
105
102
  "is-git-url","https://github.com/jonschlinkert/is-git-url/blob/396965ffabf2f46656c8af4c47bef1d69f09292e/index.js#L9C15-L9C87","['MIT']","['Jon Schlinkert']"
package/ext/tags.js CHANGED
@@ -30,6 +30,8 @@ const tags = {
30
30
 
31
31
  // DSM Specific
32
32
  PATHWAY_HASH: 'pathway.hash',
33
+ DSM_TRANSACTION_ID: 'dsm.transaction.id',
34
+ DSM_TRANSACTION_CHECKPOINT: 'dsm.transaction.checkpoint',
33
35
  }
34
36
 
35
37
  // Deprecated
package/index.d.ts CHANGED
@@ -228,6 +228,7 @@ interface Plugins {
228
228
  "azure-event-hubs": tracer.plugins.azure_event_hubs;
229
229
  "azure-functions": tracer.plugins.azure_functions;
230
230
  "azure-service-bus": tracer.plugins.azure_service_bus;
231
+ "azure-durable-functions": tracer.plugins.azure_durable_functions
231
232
  "bullmq": tracer.plugins.bullmq;
232
233
  "bunyan": tracer.plugins.bunyan;
233
234
  "cassandra-driver": tracer.plugins.cassandra_driver;
@@ -1339,6 +1340,15 @@ declare namespace tracer {
1339
1340
  * @returns The DSM context associated with the current pathway.
1340
1341
  */
1341
1342
  setConsumeCheckpoint (type: string, source: string, carrier: any, manualCheckpoint?: boolean): any;
1343
+
1344
+ /**
1345
+ * Records a transaction ID at a named checkpoint without pathway propagation.
1346
+ * Tags the active span (or the provided span) with dsm.transaction.id and dsm.transaction.checkpoint.
1347
+ * @param transactionId The unique transaction identifier (truncated to 255 UTF-8 bytes).
1348
+ * @param checkpointName The logical checkpoint name (stable 1-byte ID per process lifetime).
1349
+ * @param span The span to tag. Defaults to the currently active span.
1350
+ */
1351
+ trackTransaction(transactionId: string, checkpointName: string, span?: Span | null): void;
1342
1352
  }
1343
1353
 
1344
1354
  export interface EventTrackingV2 {
@@ -1617,6 +1627,10 @@ declare namespace tracer {
1617
1627
  * List of tags associated with the evaluation (e.g. indirect-prompt-injection)
1618
1628
  */
1619
1629
  tags: string[];
1630
+ /**
1631
+ * Sensitive Data Scanner findings from the evaluation.
1632
+ */
1633
+ sds: Object[];
1620
1634
  }
1621
1635
 
1622
1636
  /**
@@ -1632,6 +1646,10 @@ declare namespace tracer {
1632
1646
  * List of tags associated with the evaluation (e.g. indirect-prompt-injection)
1633
1647
  */
1634
1648
  tags: string[];
1649
+ /**
1650
+ * Sensitive Data Scanner findings from the evaluation.
1651
+ */
1652
+ sds: Object[];
1635
1653
  }
1636
1654
 
1637
1655
  /**
@@ -2025,6 +2043,22 @@ declare namespace tracer {
2025
2043
  * @default true
2026
2044
  */
2027
2045
  signature?: boolean;
2046
+
2047
+ /**
2048
+ * An object of optional callbacks to be executed during the respective
2049
+ * phase of an Apollo Gateway operation. Undefined callbacks default to a
2050
+ * noop function.
2051
+ *
2052
+ * @default {}
2053
+ */
2054
+ hooks?: {
2055
+ request?: (span?: Span, ctx?: any) => void;
2056
+ validate?: (span?: Span, ctx?: any) => void;
2057
+ plan?: (span?: Span, ctx?: any) => void;
2058
+ execute?: (span?: Span, ctx?: any) => void;
2059
+ fetch?: (span?: Span, ctx?: any) => void;
2060
+ postprocessing?: (span?: Span, ctx?: any) => void;
2061
+ };
2028
2062
  }
2029
2063
 
2030
2064
  /**
@@ -2088,6 +2122,12 @@ declare namespace tracer {
2088
2122
  */
2089
2123
  interface azure_service_bus extends Integration {}
2090
2124
 
2125
+ /**
2126
+ * This plugin automatically instruments the
2127
+ * durable-functions module
2128
+ */
2129
+ interface azure_durable_functions extends Integration {}
2130
+
2091
2131
  /**
2092
2132
  * This plugin patches the [bunyan](https://github.com/trentm/node-bunyan)
2093
2133
  * to automatically inject trace identifiers in log records when the
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "dd-trace",
3
- "version": "5.88.0",
3
+ "version": "5.90.0",
4
4
  "description": "Datadog APM tracing client for JavaScript",
5
5
  "main": "index.js",
6
6
  "typings": "index.d.ts",
@@ -16,7 +16,8 @@
16
16
  "lint": "node scripts/check_licenses.js && node scripts/check-no-coverage-artifacts.js && eslint . --concurrency=auto --max-warnings 0",
17
17
  "lint:fix": "node scripts/check_licenses.js && node scripts/check-no-coverage-artifacts.js && eslint . --concurrency=auto --max-warnings 0 --fix",
18
18
  "lint:inspect": "npx @eslint/config-inspector@latest",
19
- "lint:codeowners": "node scripts/codeowners.mjs",
19
+ "lint:codeowners": "codeowners-audit",
20
+ "lint:codeowners:ci": "codeowners-audit --glob='**/*.spec.js'",
20
21
  "release:proposal": "node scripts/release/proposal",
21
22
  "services": "node ./scripts/install_plugin_modules && node packages/dd-trace/test/setup/services",
22
23
  "test": "echo '\nError: The root \"npm test\" command is intentionally disabled.\n\nInstead, run specific test suites:\n - npm run test:trace:core\n - npm run test:appsec\n - etc.\n\nOr run individual test files:\n npx mocha path/to/test.spec.js\n\nSee CONTRIBUTING.md (Testing section) for more details.\n' && exit 1",
@@ -60,6 +61,7 @@
60
61
  "test:integration": "mocha --timeout 60000 \"integration-tests/*.spec.js\"",
61
62
  "test:integration:aiguard": "mocha --timeout 60000 \"integration-tests/aiguard/*.spec.js\"",
62
63
  "test:integration:appsec": "mocha --timeout 60000 \"integration-tests/appsec/*.spec.js\"",
64
+ "test:integration:bun": "mocha --timeout 60000 \"integration-tests/bun/*.spec.js\"",
63
65
  "test:integration:cucumber": "mocha --timeout 60000 \"integration-tests/cucumber/*.spec.js\"",
64
66
  "test:integration:cypress": "mocha --timeout 60000 \"integration-tests/cypress/*.spec.js\"",
65
67
  "test:integration:debugger": "mocha --timeout 60000 \"integration-tests/debugger/*.spec.js\"",
@@ -132,44 +134,46 @@
132
134
  ],
133
135
  "dependencies": {
134
136
  "dc-polyfill": "^0.1.10",
135
- "import-in-the-middle": "^2.0.6"
137
+ "import-in-the-middle": "^3.0.0"
136
138
  },
137
139
  "optionalDependencies": {
138
140
  "@datadog/libdatadog": "0.8.1",
139
141
  "@datadog/native-appsec": "11.0.1",
140
142
  "@datadog/native-iast-taint-tracking": "4.1.0",
141
143
  "@datadog/native-metrics": "3.1.1",
142
- "@datadog/openfeature-node-server": "^0.3.3",
143
- "@datadog/pprof": "5.13.4",
144
+ "@datadog/openfeature-node-server": "^1.1.0",
145
+ "@datadog/pprof": "5.13.5",
144
146
  "@datadog/wasm-js-rewriter": "5.0.1",
145
147
  "@opentelemetry/api": ">=1.0.0 <1.10.0",
146
148
  "@opentelemetry/api-logs": "<1.0.0",
147
- "oxc-parser": "^0.115.0"
149
+ "oxc-parser": "^0.116.0"
148
150
  },
149
151
  "devDependencies": {
152
+ "@actions/core": "^3.0.0",
153
+ "@actions/github": "^9.0.0",
150
154
  "@babel/helpers": "^7.28.6",
151
155
  "@eslint/eslintrc": "^3.3.1",
152
156
  "@eslint/js": "^9.39.2",
153
157
  "@msgpack/msgpack": "^3.1.3",
154
158
  "@openfeature/core": "^1.8.1",
155
159
  "@openfeature/server-sdk": "~1.20.0",
156
- "@snyk/github-codeowners": "^1.1.0",
157
- "@stylistic/eslint-plugin": "^5.7.1",
160
+ "@stylistic/eslint-plugin": "^5.10.0",
158
161
  "@types/mocha": "^10.0.10",
159
162
  "@types/node": "^18.19.106",
160
163
  "@types/sinon": "^21.0.0",
161
164
  "axios": "^1.13.4",
162
165
  "benchmark": "^2.1.4",
163
166
  "body-parser": "^2.2.2",
164
- "bun": "1.3.8",
167
+ "bun": "1.3.10",
168
+ "codeowners-audit": "^2.7.1",
165
169
  "eslint": "^9.39.2",
166
- "eslint-plugin-cypress": "^5.2.1",
170
+ "eslint-plugin-cypress": "^6.1.0",
167
171
  "eslint-plugin-import": "^2.32.0",
168
172
  "eslint-plugin-jsdoc": "^62.5.0",
169
173
  "eslint-plugin-mocha": "^11.2.0",
170
174
  "eslint-plugin-n": "^17.23.2",
171
175
  "eslint-plugin-promise": "^7.2.1",
172
- "eslint-plugin-unicorn": "^62.0.0",
176
+ "eslint-plugin-unicorn": "^63.0.0",
173
177
  "express": "^5.1.0",
174
178
  "glob": "^10.4.5",
175
179
  "globals": "^17.2.0",
@@ -179,9 +183,9 @@
179
183
  "mocha": "^11.6.0",
180
184
  "mocha-junit-reporter": "^2.2.1",
181
185
  "mocha-multi-reporters": "^1.5.1",
182
- "multer": "^2.0.2",
186
+ "multer": "^2.1.1",
183
187
  "nock": "^13.5.6",
184
- "nyc": "^17.1.0",
188
+ "nyc": "^18.0.0",
185
189
  "octokit": "^5.0.3",
186
190
  "opentracing": ">=0.14.7",
187
191
  "p-limit": "^7.2.0",
@@ -189,7 +193,7 @@
189
193
  "retry": "^0.13.1",
190
194
  "semifies": "^1.0.0",
191
195
  "semver": "^7.7.2",
192
- "sinon": "^21.0.1",
196
+ "sinon": "^21.0.2",
193
197
  "tiktoken": "^1.0.21",
194
198
  "typescript": "^5.9.2",
195
199
  "workerpool": "^10.0.0",
@@ -0,0 +1,75 @@
1
+ 'use strict'
2
+
3
+ const dc = require('dc-polyfill')
4
+ const shimmer = require('../../datadog-shimmer')
5
+
6
+ const {
7
+ addHook,
8
+ } = require('./helpers/instrument')
9
+
10
+ /**
11
+ * @type {import('diagnostics_channel').TracingChannel}
12
+ */
13
+ const azureDurableFunctionsChannel = dc.tracingChannel('datadog:azure:durable-functions:invoke')
14
+
15
+ addHook({ name: 'durable-functions', versions: ['>=3'], patchDefault: false }, (df) => {
16
+ const { app } = df
17
+
18
+ shimmer.wrap(app, 'entity', entityWrapper)
19
+ shimmer.wrap(app, 'activity', activityHandler)
20
+
21
+ return df
22
+ })
23
+
24
+ function entityWrapper (method) {
25
+ return function (entityName, arg) {
26
+ // because this method is overloaded, the second argument can either be an object
27
+ // with the handler or the handler itself, so first we figure which type it is
28
+ if (typeof arg === 'function') {
29
+ // if a function, this is the handler we want to wrap and trace
30
+ arguments[1] = shimmer.wrapFunction(arg, handler => entityHandler(handler, entityName))
31
+ } else {
32
+ // if an object, access the handler then trace it
33
+ shimmer.wrap(arg, 'handler', handler => entityHandler(handler, entityName))
34
+ }
35
+
36
+ return method.apply(this, arguments)
37
+ }
38
+ }
39
+
40
+ function entityHandler (handler, entityName) {
41
+ return function () {
42
+ if (!azureDurableFunctionsChannel.hasSubscribers) return handler.apply(this, arguments)
43
+
44
+ const entityContext = arguments[0]
45
+ return azureDurableFunctionsChannel.traceSync(
46
+ handler,
47
+ { trigger: 'Entity', functionName: entityName, operationName: entityContext?.df?.operationName },
48
+ this, ...arguments)
49
+ }
50
+ }
51
+
52
+ function activityHandler (method) {
53
+ return function (activityName, activityOptions) {
54
+ shimmer.wrap(activityOptions, 'handler', handler => {
55
+ const isAsync =
56
+ handler && handler.constructor && handler.constructor.name === 'AsyncFunction'
57
+
58
+ return function () {
59
+ if (!azureDurableFunctionsChannel.hasSubscribers) return handler.apply(this, arguments)
60
+
61
+ // use tracePromise if this is an async handler. otherwise, use traceSync
62
+ return isAsync
63
+ ? azureDurableFunctionsChannel.tracePromise(
64
+ handler,
65
+ { trigger: 'Activity', functionName: activityName },
66
+ this, ...arguments)
67
+ : azureDurableFunctionsChannel.traceSync(
68
+ handler,
69
+ { trigger: 'Activity', functionName: activityName },
70
+ this, ...arguments)
71
+ }
72
+ })
73
+ return method.apply(this, arguments)
74
+ }
75
+ }
@@ -55,6 +55,8 @@ const originalCoverageMap = createCoverageMap()
55
55
  const patched = new WeakSet()
56
56
 
57
57
  const lastStatusByPickleId = new Map()
58
+ /** For ATR: statuses keyed by stable scenario id (uri:name) so retries accumulate correctly */
59
+ const atrStatusesByScenarioKey = new Map()
58
60
  const numRetriesByPickleId = new Map()
59
61
  const numAttemptToCtx = new Map()
60
62
  const newTestsByTestFullname = new Map()
@@ -275,6 +277,17 @@ function wrapRun (pl, isLatestVersion, version) {
275
277
  const isFirstAttempt = numAttempt++ === 0
276
278
  const isAtrRetry = !isFirstAttempt && isFlakyTestRetriesEnabled
277
279
 
280
+ // ATR: record this attempt as failed so when run().finally runs (after retry) we have all statuses
281
+ if (isFlakyTestRetriesEnabled && isAtrRetry === false) {
282
+ const nameForKey = this.pickle.name.replace(/\s*\(attempt \d+(?:, retried)?\)\s*$/, '')
283
+ const atrKey = `${this.pickle.uri}:${nameForKey}`
284
+ if (atrStatusesByScenarioKey.has(atrKey)) {
285
+ atrStatusesByScenarioKey.get(atrKey).push('fail')
286
+ } else {
287
+ atrStatusesByScenarioKey.set(atrKey, ['fail'])
288
+ }
289
+ }
290
+
278
291
  if (promises.hitBreakpointPromise) {
279
292
  await promises.hitBreakpointPromise
280
293
  }
@@ -367,6 +380,25 @@ function wrapRun (pl, isLatestVersion, version) {
367
380
  }
368
381
  }
369
382
 
383
+ // ATR: accumulate statuses by stable scenario key (uri:name) so retries are grouped.
384
+ // Cucumber appends " (attempt N)" or " (attempt N, retried)" to the scenario name; normalize for keying.
385
+ if (isFlakyTestRetriesEnabled && !isAttemptToFix && !isEfdRetry && numTestRetries > 0) {
386
+ const nameForKey = this.pickle.name.replace(/\s*\(attempt \d+(?:, retried)?\)\s*$/, '')
387
+ const atrKey = `${this.pickle.uri}:${nameForKey}`
388
+ if (atrStatusesByScenarioKey.has(atrKey)) {
389
+ atrStatusesByScenarioKey.get(atrKey).push(status)
390
+ } else {
391
+ atrStatusesByScenarioKey.set(atrKey, [status])
392
+ }
393
+ const atrStatuses = atrStatusesByScenarioKey.get(atrKey)
394
+ const pickleStatuses = lastStatusByPickleId.get(this.pickle.id)
395
+ const statusesToCheck = atrStatuses?.length >= (numTestRetries + 1) ? atrStatuses : pickleStatuses
396
+ if (statusesToCheck && statusesToCheck.length === numTestRetries + 1 &&
397
+ statusesToCheck.every(s => s === 'fail')) {
398
+ hasFailedAllRetries = true
399
+ }
400
+ }
401
+
370
402
  const attemptCtx = numAttemptToCtx.get(numAttempt)
371
403
 
372
404
  const error = getErrorFromCucumberResult(result)
@@ -480,7 +512,8 @@ function getWrappedStart (start, frameworkVersion, isParallel = false, isCoordin
480
512
  earlyFlakeDetectionFaultyThreshold = configurationResponse.libraryConfig?.earlyFlakeDetectionFaultyThreshold
481
513
  isSuitesSkippingEnabled = configurationResponse.libraryConfig?.isSuitesSkippingEnabled
482
514
  isFlakyTestRetriesEnabled = configurationResponse.libraryConfig?.isFlakyTestRetriesEnabled
483
- numTestRetries = configurationResponse.libraryConfig?.flakyTestRetriesCount
515
+ const configRetryCount = configurationResponse.libraryConfig?.flakyTestRetriesCount
516
+ numTestRetries = (typeof configRetryCount === 'number' && configRetryCount > 0) ? configRetryCount : 0
484
517
  isKnownTestsEnabled = configurationResponse.libraryConfig?.isKnownTestsEnabled
485
518
  isTestManagementTestsEnabled = configurationResponse.libraryConfig?.isTestManagementEnabled
486
519
  testManagementAttemptToFixRetries = configurationResponse.libraryConfig?.testManagementAttemptToFixRetries
@@ -563,6 +596,7 @@ function getWrappedStart (start, frameworkVersion, isParallel = false, isCoordin
563
596
  options.retry = numTestRetries
564
597
  }
565
598
 
599
+ atrStatusesByScenarioKey.clear()
566
600
  sessionStartCh.publish({ command, frameworkVersion })
567
601
 
568
602
  if (!errorSkippableRequest && skippedSuites.length) {
@@ -1016,6 +1050,9 @@ addHook({
1016
1050
  this.options.worldParameters._ddModifiedFiles = modifiedFiles
1017
1051
  }
1018
1052
 
1053
+ this.options.worldParameters._ddIsFlakyTestRetriesEnabled = isFlakyTestRetriesEnabled
1054
+ this.options.worldParameters._ddNumTestRetries = numTestRetries
1055
+
1019
1056
  return startWorker.apply(this, arguments)
1020
1057
  })
1021
1058
  return adapterPackage
@@ -1051,6 +1088,8 @@ addHook({
1051
1088
  if (isImpactedTestsEnabled) {
1052
1089
  modifiedFiles = this.options.worldParameters._ddModifiedFiles
1053
1090
  }
1091
+ isFlakyTestRetriesEnabled = !!this.options.worldParameters._ddIsFlakyTestRetriesEnabled
1092
+ numTestRetries = this.options.worldParameters._ddNumTestRetries ?? 0
1054
1093
  }
1055
1094
  )
1056
1095
  return workerPackage
@@ -6,12 +6,21 @@ const {
6
6
  addHook,
7
7
  } = require('./helpers/instrument')
8
8
 
9
- addHook({ name: '@elastic/transport', file: 'lib/Transport.js', versions: ['>=8'] }, (exports) => {
10
- shimmer.wrap(exports.default.prototype, 'request', createWrapRequest('elasticsearch'))
11
- shimmer.wrap(exports.default.prototype, 'getConnection', createWrapGetConnection('elasticsearch'))
9
+ addHook({ name: '@elastic/transport', file: 'lib/Transport.js', versions: ['>=8 <9'] }, (exports) => {
10
+ wrapTransportPrototype(exports.default)
12
11
  return exports
13
12
  })
14
13
 
14
+ addHook({ name: '@elastic/transport', versions: ['>=9'] }, (exports) => {
15
+ wrapTransportPrototype(exports.Transport)
16
+ return exports
17
+ })
18
+
19
+ function wrapTransportPrototype (Transport) {
20
+ shimmer.wrap(Transport.prototype, 'request', createWrapRequest('elasticsearch'))
21
+ shimmer.wrap(Transport.prototype, 'getConnection', createWrapGetConnection('elasticsearch'))
22
+ }
23
+
15
24
  addHook({ name: '@elastic/elasticsearch', file: 'lib/Transport.js', versions: ['>=5.6.16 <8', '>=8'] }, Transport => {
16
25
  shimmer.wrap(Transport.prototype, 'request', createWrapRequest('elasticsearch'))
17
26
  shimmer.wrap(Transport.prototype, 'getConnection', createWrapGetConnection('elasticsearch'))
@@ -8,6 +8,7 @@ module.exports = {
8
8
  '@aws-sdk/smithy-client': () => require('../aws-sdk'),
9
9
  '@azure/event-hubs': () => require('../azure-event-hubs'),
10
10
  '@azure/functions': () => require('../azure-functions'),
11
+ 'durable-functions': () => require('../azure-durable-functions'),
11
12
  '@azure/service-bus': () => require('../azure-service-bus'),
12
13
  '@cucumber/cucumber': () => require('../cucumber'),
13
14
  '@playwright/test': () => require('../playwright'),
@@ -1,95 +1,45 @@
1
1
  'use strict'
2
2
 
3
- /*
4
- This rewriter is basically a JavaScript version of Orchestrion-JS. The goal is
5
- not to replace Orchestrion-JS, but rather to make it easier and faster to write
6
- new integrations in the short-term, especially as many changes to the rewriter
7
- will be needed as all the patterns we need have not been identified yet. This
8
- will avoid the back and forth of having to make Rust changes to an external
9
- library for every integration change or addition that requires something new.
10
-
11
- In the meantime, we'll work concurrently on a change to Orchestrion-JS that
12
- adds an "arbitrary transform" or "plugin" system that can be used from
13
- JavaScript, in order to enable quick iteration while still using Orchestrion-JS.
14
- Once that's done we'll use that, so that we can remove this JS approach and
15
- return to using Orchestrion-JS.
16
-
17
- The long term goal is to backport any additional features we add to the JS
18
- rewriter (or using the plugin system in Orchestrion-JS once we're using that)
19
- to Orchestrion-JS once we're confident that the implementation is fairly
20
- complete and has all features we need.
21
-
22
- Here is a list of the additions and changes in this rewriter compared to
23
- Orchestrion-JS that will need to be backported:
24
-
25
- (NOTE: Please keep this list up-to-date whenever new features are added)
26
-
27
- - Supports an `astQuery` field to filter AST nodes with an esquery query. This
28
- is mostly meant to be used when experimenting or if what needs to be queried
29
- is not a function. We'll see over time if something like this is needed to be
30
- backported or if it can be replaced by simpler queries.
31
- - Supports replacing methods of child class instances in the base constructor.
32
- - Supports tracing iterator (sync/async) returning functions (sync/async).
33
- */
34
-
35
3
  const { readFileSync } = require('fs')
36
4
  const { join } = require('path')
37
- const semifies = require('../../../../../vendor/dist/semifies')
38
5
  const log = require('../../../../dd-trace/src/log')
39
- const { getEnvironmentVariable } = require('../../../../dd-trace/src/config/helper')
40
- const { transform } = require('./transformer')
41
- const { generate, parse, traverse } = require('./compiler')
42
6
  const instrumentations = require('./instrumentations')
7
+ const { create } = require('./orchestrion')
43
8
 
44
- const NODE_OPTIONS = getEnvironmentVariable('NODE_OPTIONS')
45
-
46
- /** @type {Record<string, Set<string>>} map of module base name to supported function query versions */
47
- const supported = {}
9
+ /** @type {Record<string, string>} map of module base name to version */
10
+ const moduleVersions = {}
48
11
  const disabled = new Set()
49
-
50
- // TODO: Source maps without `--enable-source-maps`.
51
- const enableSourceMaps = NODE_OPTIONS?.includes('--enable-source-maps') ||
52
- process.execArgv?.some(arg => arg.includes('--enable-source-maps'))
53
-
54
- let SourceMapGenerator
12
+ const matcher = create(instrumentations, 'dc-polyfill')
55
13
 
56
14
  function rewrite (content, filename, format) {
57
15
  if (!content) return content
16
+ if (!filename.includes('node_modules')) return content
58
17
 
59
- const sourceType = format === 'module' ? 'module' : 'script'
60
-
61
- try {
62
- let ast
63
-
64
- filename = filename.replace('file://', '')
65
-
66
- for (const inst of instrumentations) {
67
- const { astQuery, functionQuery = {}, module: { name, versionRange, filePath } } = inst
18
+ filename = filename.replace('file://', '')
68
19
 
69
- if (disabled.has(name)) continue
70
- if (!filename.endsWith(`${name}/${filePath}`)) continue
71
- if (!satisfies(filename, filePath, versionRange)) continue
20
+ const moduleType = format === 'module' ? 'esm' : 'cjs'
21
+ const [modulePath] = filename.split('/node_modules/').reverse()
22
+ const moduleParts = modulePath.split('/')
23
+ const splitIndex = moduleParts[0].startsWith('@') ? 2 : 1
24
+ const moduleName = moduleParts.slice(0, splitIndex).join('/')
25
+ const filePath = moduleParts.slice(splitIndex).join('/')
26
+ const version = getVersion(filename, filePath)
72
27
 
73
- ast ??= parse(content.toString(), { range: true, sourceType })
28
+ if (disabled.has(moduleName)) return content
74
29
 
75
- const query = astQuery || fromFunctionQuery(functionQuery)
76
- const state = { ...inst, sourceType, functionQuery }
30
+ const transformer = matcher.getTransformer(moduleName, version, filePath)
77
31
 
78
- traverse(ast, query, (...args) => transform(state, ...args))
79
- }
32
+ if (!transformer) return content
80
33
 
81
- if (ast) {
82
- if (!enableSourceMaps) return generate(ast)
34
+ try {
35
+ // TODO: pass existing sourcemap as input for remapping
36
+ const { code, map } = transformer.transform(content, moduleType)
83
37
 
84
- // TODO: Can we use the same version of `source-map` that DI uses?
85
- SourceMapGenerator ??= require('../../../../../vendor/dist/@datadog/source-map').SourceMapGenerator
38
+ if (!map) return code
86
39
 
87
- const sourceMap = new SourceMapGenerator({ file: filename })
88
- const code = generate(ast, { sourceMap })
89
- const map = Buffer.from(sourceMap.toString()).toString('base64')
40
+ const inlineMap = Buffer.from(map).toString('base64')
90
41
 
91
- return code + '\n' + `//# sourceMappingURL=data:application/json;base64,${map}`
92
- }
42
+ return code + '\n' + `//# sourceMappingURL=data:application/json;base64,${inlineMap}`
93
43
  } catch (e) {
94
44
  log.error(e)
95
45
  }
@@ -101,55 +51,20 @@ function disable (instrumentation) {
101
51
  disabled.add(instrumentation)
102
52
  }
103
53
 
104
- function satisfies (filename, filePath, versions) {
54
+ function getVersion (filename, filePath) {
105
55
  const [basename] = filename.split(filePath)
106
56
 
107
- supported[basename] ??= new Set()
108
-
109
- if (!supported[basename].has(versions)) {
57
+ if (!moduleVersions[basename]) {
110
58
  try {
111
59
  const pkg = JSON.parse(readFileSync(
112
60
  join(basename, 'package.json'), 'utf8'
113
61
  ))
114
62
 
115
- if (semifies(pkg.version, versions)) {
116
- supported[basename].add(versions)
117
- }
63
+ moduleVersions[basename] = pkg.version
118
64
  } catch {}
119
65
  }
120
66
 
121
- return supported[basename].has(versions)
122
- }
123
-
124
- // TODO: Support index
125
- function fromFunctionQuery (functionQuery) {
126
- const { methodName, functionName, expressionName, className } = functionQuery
127
- const queries = []
128
-
129
- if (className) {
130
- queries.push(
131
- `[id.name="${className}"]`,
132
- `[id.name="${className}"] > ClassExpression`,
133
- `[id.name="${className}"] > ClassBody > [key.name="${methodName}"] > [async]`,
134
- `[id.name="${className}"] > ClassExpression > ClassBody > [key.name="${methodName}"] > [async]`
135
- )
136
- } else if (methodName) {
137
- queries.push(
138
- `ClassBody > [key.name="${methodName}"] > [async]`,
139
- `Property[key.name="${methodName}"] > [async]`
140
- )
141
- }
142
-
143
- if (functionName) {
144
- queries.push(`FunctionDeclaration[id.name="${functionName}"][async]`)
145
- } else if (expressionName) {
146
- queries.push(
147
- `FunctionExpression[id.name="${expressionName}"][async]`,
148
- `ArrowFunctionExpression[id.name="${expressionName}"][async]`
149
- )
150
- }
151
-
152
- return queries.join(', ')
67
+ return moduleVersions[basename]
153
68
  }
154
69
 
155
70
  module.exports = { rewrite, disable }
@@ -1,6 +1,6 @@
1
1
  'use strict'
2
2
 
3
- const log = require('../../../../dd-trace/src/log')
3
+ const log = require('../../../../../dd-trace/src/log')
4
4
 
5
5
  // eslint-disable-next-line camelcase, no-undef
6
6
  const runtimeRequire = typeof __webpack_require__ === 'function' ? __non_webpack_require__ : require
@@ -25,7 +25,7 @@ const compiler = {
25
25
  log.error(e)
26
26
 
27
27
  // Fallback for when OXC is not available.
28
- const meriyah = require('../../../../../vendor/dist/meriyah')
28
+ const meriyah = require('../../../../../../vendor/dist/meriyah')
29
29
 
30
30
  compiler.parse = (sourceText, { range, sourceType } = {}) => {
31
31
  return meriyah.parse(sourceText.toString(), {
@@ -40,7 +40,7 @@ const compiler = {
40
40
  },
41
41
 
42
42
  generate: (...args) => {
43
- const astring = require('../../../../../vendor/dist/astring')
43
+ const astring = require('../../../../../../vendor/dist/astring')
44
44
 
45
45
  compiler.generate = astring.generate
46
46
 
@@ -48,7 +48,7 @@ const compiler = {
48
48
  },
49
49
 
50
50
  traverse: (ast, query, visitor) => {
51
- const esquery = require('../../../../../vendor/dist/esquery').default
51
+ const esquery = require('../../../../../../vendor/dist/esquery').default
52
52
 
53
53
  compiler.traverse = (ast, query, visitor) => {
54
54
  return esquery.traverse(ast, esquery.parse(query), visitor)
@@ -58,7 +58,7 @@ const compiler = {
58
58
  },
59
59
 
60
60
  query: (ast, query) => {
61
- const esquery = require('../../../../../vendor/dist/esquery').default
61
+ const esquery = require('../../../../../../vendor/dist/esquery').default
62
62
 
63
63
  compiler.query = esquery.query
64
64