dd-trace 5.72.0 → 5.74.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/LICENSE-3rdparty.csv +3 -0
  2. package/index.d.ts +49 -0
  3. package/package.json +11 -6
  4. package/packages/datadog-core/src/utils/src/set.js +5 -1
  5. package/packages/datadog-esbuild/index.js +112 -36
  6. package/packages/datadog-esbuild/src/utils.js +198 -0
  7. package/packages/datadog-instrumentations/src/azure-service-bus.js +49 -22
  8. package/packages/datadog-instrumentations/src/cookie-parser.js +2 -0
  9. package/packages/datadog-instrumentations/src/express-session.js +1 -0
  10. package/packages/datadog-instrumentations/src/express.js +82 -0
  11. package/packages/datadog-instrumentations/src/helpers/router-helper.js +238 -0
  12. package/packages/datadog-instrumentations/src/jest.js +60 -14
  13. package/packages/datadog-instrumentations/src/mocha/utils.js +3 -4
  14. package/packages/datadog-instrumentations/src/playwright.js +110 -56
  15. package/packages/datadog-instrumentations/src/router.js +63 -6
  16. package/packages/datadog-instrumentations/src/ws.js +3 -3
  17. package/packages/datadog-plugin-amqplib/src/consumer.js +1 -1
  18. package/packages/datadog-plugin-azure-functions/src/index.js +24 -14
  19. package/packages/datadog-plugin-azure-service-bus/src/index.js +1 -1
  20. package/packages/datadog-plugin-azure-service-bus/src/producer.js +60 -12
  21. package/packages/datadog-plugin-express/src/code_origin.js +2 -0
  22. package/packages/datadog-plugin-jest/src/index.js +53 -18
  23. package/packages/datadog-plugin-ws/src/close.js +2 -2
  24. package/packages/datadog-plugin-ws/src/producer.js +1 -1
  25. package/packages/datadog-plugin-ws/src/receiver.js +1 -1
  26. package/packages/dd-trace/src/appsec/index.js +9 -1
  27. package/packages/dd-trace/src/appsec/reporter.js +2 -3
  28. package/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js +5 -0
  29. package/packages/dd-trace/src/config-helper.js +3 -1
  30. package/packages/dd-trace/src/config.js +437 -446
  31. package/packages/dd-trace/src/config_defaults.js +5 -12
  32. package/packages/dd-trace/src/llmobs/plugins/ai/util.js +8 -3
  33. package/packages/dd-trace/src/llmobs/plugins/base.js +11 -12
  34. package/packages/dd-trace/src/llmobs/sdk.js +20 -4
  35. package/packages/dd-trace/src/llmobs/tagger.js +12 -0
  36. package/packages/dd-trace/src/opentelemetry/logs/otlp_http_log_exporter.js +7 -127
  37. package/packages/dd-trace/src/opentelemetry/logs/otlp_transformer.js +19 -134
  38. package/packages/dd-trace/src/opentelemetry/otlp/metrics.proto +720 -0
  39. package/packages/dd-trace/src/opentelemetry/otlp/metrics_service.proto +78 -0
  40. package/packages/dd-trace/src/opentelemetry/otlp/otlp_http_exporter_base.js +177 -0
  41. package/packages/dd-trace/src/opentelemetry/otlp/otlp_transformer_base.js +163 -0
  42. package/packages/dd-trace/src/opentelemetry/{protos → otlp}/protobuf_loader.js +24 -6
  43. package/packages/dd-trace/src/plugins/util/ci.js +3 -2
  44. package/packages/dd-trace/src/plugins/util/stacktrace.js +16 -1
  45. package/packages/dd-trace/src/supported-configurations.json +2 -0
  46. package/packages/dd-trace/src/telemetry/endpoints.js +27 -1
  47. package/packages/dd-trace/src/telemetry/index.js +16 -13
  48. package/scripts/preinstall.js +3 -1
  49. package/version.js +2 -1
  50. /package/packages/dd-trace/src/opentelemetry/{protos → otlp}/common.proto +0 -0
  51. /package/packages/dd-trace/src/opentelemetry/{protos → otlp}/logs.proto +0 -0
  52. /package/packages/dd-trace/src/opentelemetry/{protos → otlp}/logs_service.proto +0 -0
  53. /package/packages/dd-trace/src/opentelemetry/{protos → otlp}/resource.proto +0 -0
@@ -0,0 +1,78 @@
1
+ // Vendored from: https://github.com/open-telemetry/opentelemetry-proto/blob/v1.7.0/opentelemetry/proto/collector/metrics/v1/metrics_service.proto
2
+ // Copyright 2019, OpenTelemetry Authors
3
+ //
4
+ // Licensed under the Apache License, Version 2.0 (the "License");
5
+ // you may not use this file except in compliance with the License.
6
+ // You may obtain a copy of the License at
7
+ //
8
+ // http://www.apache.org/licenses/LICENSE-2.0
9
+ //
10
+ // Unless required by applicable law or agreed to in writing, software
11
+ // distributed under the License is distributed on an "AS IS" BASIS,
12
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ // See the License for the specific language governing permissions and
14
+ // limitations under the License.
15
+
16
+ syntax = "proto3";
17
+
18
+ package opentelemetry.proto.collector.metrics.v1;
19
+
20
+ import "metrics.proto";
21
+
22
+ option csharp_namespace = "OpenTelemetry.Proto.Collector.Metrics.V1";
23
+ option java_multiple_files = true;
24
+ option java_package = "io.opentelemetry.proto.collector.metrics.v1";
25
+ option java_outer_classname = "MetricsServiceProto";
26
+ option go_package = "go.opentelemetry.io/proto/otlp/collector/metrics/v1";
27
+
28
+ // Service that can be used to push metrics between one Application
29
+ // instrumented with OpenTelemetry and a collector, or between a collector and a
30
+ // central collector.
31
+ service MetricsService {
32
+ rpc Export(ExportMetricsServiceRequest) returns (ExportMetricsServiceResponse) {}
33
+ }
34
+
35
+ message ExportMetricsServiceRequest {
36
+ // An array of ResourceMetrics.
37
+ // For data coming from a single resource this array will typically contain one
38
+ // element. Intermediary nodes (such as OpenTelemetry Collector) that receive
39
+ // data from multiple origins typically batch the data before forwarding further and
40
+ // in that case this array will contain multiple elements.
41
+ repeated opentelemetry.proto.metrics.v1.ResourceMetrics resource_metrics = 1;
42
+ }
43
+
44
+ message ExportMetricsServiceResponse {
45
+ // The details of a partially successful export request.
46
+ //
47
+ // If the request is only partially accepted
48
+ // (i.e. when the server accepts only parts of the data and rejects the rest)
49
+ // the server MUST initialize the `partial_success` field and MUST
50
+ // set the `rejected_<signal>` with the number of items it rejected.
51
+ //
52
+ // Servers MAY also make use of the `partial_success` field to convey
53
+ // warnings/suggestions to senders even when the request was fully accepted.
54
+ // In such cases, the `rejected_<signal>` MUST have a value of `0` and
55
+ // the `error_message` MUST be non-empty.
56
+ //
57
+ // A `partial_success` message with an empty value (rejected_<signal> = 0 and
58
+ // `error_message` = "") is equivalent to it not being set/present. Senders
59
+ // SHOULD interpret it the same way as in the full success case.
60
+ ExportMetricsPartialSuccess partial_success = 1;
61
+ }
62
+
63
+ message ExportMetricsPartialSuccess {
64
+ // The number of rejected data points.
65
+ //
66
+ // A `rejected_<signal>` field holding a `0` value indicates that the
67
+ // request was fully accepted.
68
+ int64 rejected_data_points = 1;
69
+
70
+ // A developer-facing human-readable message in English. It should be used
71
+ // either to explain why the server rejected parts of the data during a partial
72
+ // success or to convey warnings/suggestions during a full success. The message
73
+ // should offer guidance on how users can address such issues.
74
+ //
75
+ // error_message is an optional field. An error_message with an empty value
76
+ // is equivalent to it not being set.
77
+ string error_message = 2;
78
+ }
@@ -0,0 +1,177 @@
'use strict'

const http = require('http')
const { URL } = require('url')
const log = require('../../log')
const telemetryMetrics = require('../../telemetry/metrics')

const tracerMetrics = telemetryMetrics.manager.namespace('tracers')

/**
 * Base class for OTLP HTTP exporters.
 *
 * This implementation follows the OTLP HTTP specification:
 * https://opentelemetry.io/docs/specs/otlp/#otlphttp
 *
 * @class OtlpHttpExporterBase
 */
class OtlpHttpExporterBase {
  #telemetryTags

  /**
   * Creates a new OtlpHttpExporterBase instance.
   *
   * @param {string} url - OTLP endpoint URL
   * @param {string} headers - Additional HTTP headers as comma-separated key=value string
   * @param {number} timeout - Request timeout in milliseconds
   * @param {string} protocol - OTLP protocol (http/protobuf or http/json)
   * @param {string} defaultPath - Default path to use if URL has no path
   * @param {string} signalType - Signal type for error messages (e.g., 'logs', 'metrics')
   */
  constructor (url, headers, timeout, protocol, defaultPath, signalType) {
    const parsedUrl = new URL(url)

    this.protocol = protocol
    this.signalType = signalType

    // If no path is provided, use default path
    const path = parsedUrl.pathname === '/' ? defaultPath : parsedUrl.pathname
    const isJson = protocol === 'http/json'

    this.options = {
      hostname: parsedUrl.hostname,
      port: parsedUrl.port,
      path: path + parsedUrl.search,
      method: 'POST',
      timeout,
      headers: {
        'Content-Type': isJson ? 'application/json' : 'application/x-protobuf',
        ...this.#parseAdditionalHeaders(headers)
      }
    }
    this.#telemetryTags = [
      'protocol:http',
      `encoding:${isJson ? 'json' : 'protobuf'}`
    ]
  }

  /**
   * Gets the telemetry tags for this exporter.
   * @returns {Array<string>} Telemetry tags
   * @protected
   */
  _getTelemetryTags () {
    return this.#telemetryTags
  }

  /**
   * Records telemetry metrics for exported data.
   * @param {string} metricName - Name of the metric to record
   * @param {number} count - Count to increment
   * @param {Array<string>} [tags] - Optional custom tags (defaults to this exporter's tags)
   * @protected
   */
  _recordTelemetry (metricName, count, tags) {
    const telemetryTags = tags || this.#telemetryTags
    tracerMetrics.count(metricName, telemetryTags).inc(count)
  }

  /**
   * Sends the payload via HTTP request.
   * @param {Buffer|string} payload - The payload to send
   * @param {Function} resultCallback - Callback for the result ({ code: 0 } on success,
   *   { code: 1, error } on failure). Guaranteed to be invoked at most once.
   * @protected
   */
  _sendPayload (payload, resultCallback) {
    const options = {
      ...this.options,
      headers: {
        ...this.options.headers,
        // Use the byte length, not the string length: a JSON string payload may
        // contain multi-byte UTF-8 characters, and a wrong Content-Length breaks
        // the request framing. Buffer.byteLength handles both Buffer and string.
        'Content-Length': Buffer.byteLength(payload)
      }
    }

    // Guard against invoking the callback more than once: req.destroy() in the
    // 'timeout' handler causes a subsequent 'error' event on the request.
    let settled = false
    const done = (result) => {
      if (settled) return
      settled = true
      resultCallback(result)
    }

    const req = http.request(options, (res) => {
      let data = ''

      res.on('data', (chunk) => {
        data += chunk
      })

      res.on('end', () => {
        if (res.statusCode >= 200 && res.statusCode < 300) {
          done({ code: 0 })
        } else {
          const error = new Error(`HTTP ${res.statusCode}: ${data}`)
          done({ code: 1, error })
        }
      })
    })

    req.on('error', (error) => {
      log.error(`Error sending OTLP ${this.signalType}:`, error)
      done({ code: 1, error })
    })

    req.on('timeout', () => {
      req.destroy()
      const error = new Error('Request timeout')
      done({ code: 1, error })
    })

    req.write(payload)
    req.end()
  }

  /**
   * Parses additional HTTP headers from a comma-separated string.
   * @param {string} headersString - Comma-separated key=value pairs
   * @returns {Record<string, string>} Parsed headers object
   * @private
   */
  #parseAdditionalHeaders (headersString) {
    const headers = {}
    // A missing or empty header string means no additional headers; iterating
    // over undefined would throw.
    if (!headersString) return headers

    let key = ''
    let value = ''
    let readingKey = true

    for (const char of headersString) {
      if (readingKey) {
        if (char === '=') {
          readingKey = false
          key = key.trim()
        } else {
          key += char
        }
      } else if (char === ',') {
        value = value.trim()
        if (key && value) {
          headers[key] = value
        }
        key = ''
        value = ''
        readingKey = true
      } else {
        value += char
      }
    }

    // Add the last pair if present, applying the same key/value validity check
    // as the in-loop branch (previously an input like "=foo" could create a
    // header with an empty-string key).
    if (!readingKey) {
      value = value.trim()
      if (key && value) {
        headers[key] = value
      }
    }

    return headers
  }

  /**
   * Shuts down the exporter.
   * Subclasses can override to add cleanup logic.
   */
  shutdown () {}
}

module.exports = OtlpHttpExporterBase
@@ -0,0 +1,163 @@
'use strict'

const log = require('../../log')

/**
 * @typedef {import('@opentelemetry/api').Attributes} Attributes
 */

/**
 * Base class for OTLP transformers.
 *
 * Provides the shared machinery for converting telemetry data into the OTLP
 * wire format, in either protobuf or JSON encoding.
 *
 * @class OtlpTransformerBase
 */
class OtlpTransformerBase {
  #resourceAttributes

  /**
   * Creates a new OtlpTransformerBase instance.
   *
   * @param {Attributes} resourceAttributes - Resource attributes
   * @param {string} protocol - OTLP protocol (http/protobuf or http/json)
   * @param {string} signalType - Signal type for warning messages (e.g., 'logs', 'metrics')
   */
  constructor (resourceAttributes, protocol, signalType) {
    this.#resourceAttributes = this._transformAttributes(resourceAttributes)

    // gRPC transport is unsupported; fall back to http/protobuf with a warning.
    if (protocol === 'grpc') {
      log.warn(`OTLP gRPC protocol is not supported for ${signalType}. ` +
        'Defaulting to http/protobuf. gRPC protobuf support may be added in a future release.')
      protocol = 'http/protobuf'
    }

    this.protocol = protocol
  }

  /**
   * Groups items by instrumentation scope (name, version, schemaUrl, and attributes).
   * @param {Array} items - Array of items to group
   * @returns {Map<string, Array>} Map of instrumentation scope key to items
   * @protected
   */
  _groupByInstrumentationScope (items) {
    const buckets = new Map()

    for (const entry of items) {
      const scope = entry.instrumentationScope || { name: '', version: '', schemaUrl: '', attributes: {} }
      const serializedAttrs = JSON.stringify(scope.attributes || {})
      const bucketKey = `${scope.name}@${scope.version}@${scope.schemaUrl}@${serializedAttrs}`

      const existing = buckets.get(bucketKey)
      if (existing === undefined) {
        buckets.set(bucketKey, [entry])
      } else {
        existing.push(entry)
      }
    }
    return buckets
  }

  /**
   * Transforms resource attributes to OTLP resource format.
   * @returns {Object} OTLP resource object
   * @protected
   */
  _transformResource () {
    return {
      attributes: this.#resourceAttributes,
      droppedAttributesCount: 0
    }
  }

  /**
   * Transforms attributes to OTLP KeyValue format.
   * @param {Object} attributes - Attributes to transform
   * @returns {Object[]} Array of OTLP KeyValue objects
   * @protected
   */
  _transformAttributes (attributes) {
    if (!attributes) return []

    const keyValues = []
    for (const [key, raw] of Object.entries(attributes)) {
      keyValues.push({ key, value: this._transformAnyValue(raw) })
    }
    return keyValues
  }

  /**
   * Transforms attributes to JSON format (simplified): every value is stringified.
   * @param {Object} attributes - Attributes to transform
   * @returns {Object[]} Array of OTLP KeyValue objects with string values
   * @protected
   */
  _attributesToJson (attributes) {
    if (!attributes) return []

    const keyValues = []
    for (const [key, raw] of Object.entries(attributes)) {
      keyValues.push({ key, value: { stringValue: String(raw) } })
    }
    return keyValues
  }

  /**
   * Transforms any value to OTLP AnyValue format.
   * Primitives map to stringValue/intValue/doubleValue/boolValue; arrays and
   * plain objects recurse into arrayValue/kvlistValue; everything else is
   * stringified.
   * @param {any} value - Value to transform
   * @returns {Object} OTLP AnyValue object
   * @protected
   */
  _transformAnyValue (value) {
    switch (typeof value) {
      case 'string':
        return { stringValue: value }
      case 'number':
        return Number.isInteger(value) ? { intValue: value } : { doubleValue: value }
      case 'boolean':
        return { boolValue: value }
    }

    if (Array.isArray(value)) {
      const values = value.map((element) => this._transformAnyValue(element))
      return { arrayValue: { values } }
    }

    // null is typeof 'object' but falsy, so it falls through to stringification.
    if (value && typeof value === 'object') {
      const values = Object.entries(value).map(([nestedKey, nestedValue]) => ({
        key: nestedKey,
        value: this._transformAnyValue(nestedValue)
      }))
      return { kvlistValue: { values } }
    }

    return { stringValue: String(value) }
  }

  /**
   * Serializes data to protobuf format.
   * @param {Object} protoType - Protobuf type from protobuf_loader
   * @param {Object} data - Data to serialize
   * @returns {Buffer} Protobuf-encoded data
   * @protected
   */
  _serializeToProtobuf (protoType, data) {
    return protoType.encode(protoType.create(data)).finish()
  }

  /**
   * Serializes data to JSON format.
   * @param {Object} data - Data to serialize
   * @returns {Buffer} JSON-encoded data
   * @protected
   */
  _serializeToJson (data) {
    return Buffer.from(JSON.stringify(data))
  }
}

module.exports = OtlpTransformerBase
@@ -1,9 +1,9 @@
1
1
  'use strict'
2
2
 
3
3
  /**
4
- * @fileoverview Protobuf Loader for OpenTelemetry Logs
4
+ * @fileoverview Protobuf Loader for OpenTelemetry Logs and Metrics
5
5
  *
6
- * This module loads protobuf definitions for OpenTelemetry logs.
6
+ * This module loads protobuf definitions for OpenTelemetry logs and metrics.
7
7
  *
8
8
  * VERSION SUPPORT:
9
9
  * - OTLP Protocol: v1.7.0
@@ -20,10 +20,17 @@ const path = require('path')
20
20
  let _root = null
21
21
  let protoLogsService = null
22
22
  let protoSeverityNumber = null
23
+ let protoMetricsService = null
24
+ let protoAggregationTemporality = null
23
25
 
24
26
  function getProtobufTypes () {
25
27
  if (_root) {
26
- return { protoLogsService, protoSeverityNumber }
28
+ return {
29
+ protoLogsService,
30
+ protoSeverityNumber,
31
+ protoMetricsService,
32
+ protoAggregationTemporality
33
+ }
27
34
  }
28
35
  // Load the proto files
29
36
  const protoDir = __dirname
@@ -31,16 +38,27 @@ function getProtobufTypes () {
31
38
  'common.proto',
32
39
  'resource.proto',
33
40
  'logs.proto',
34
- 'logs_service.proto'
41
+ 'logs_service.proto',
42
+ 'metrics.proto',
43
+ 'metrics_service.proto'
35
44
  ].map(file => path.join(protoDir, file))
36
45
 
37
46
  _root = protobuf.loadSync(protoFiles)
38
47
 
39
- // Get the message types
48
+ // Get the message types for logs
40
49
  protoLogsService = _root.lookupType('opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest')
41
50
  protoSeverityNumber = _root.lookupEnum('opentelemetry.proto.logs.v1.SeverityNumber')
42
51
 
43
- return { protoLogsService, protoSeverityNumber }
52
+ // Get the message types for metrics
53
+ protoMetricsService = _root.lookupType('opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest')
54
+ protoAggregationTemporality = _root.lookupEnum('opentelemetry.proto.metrics.v1.AggregationTemporality')
55
+
56
+ return {
57
+ protoLogsService,
58
+ protoSeverityNumber,
59
+ protoMetricsService,
60
+ protoAggregationTemporality
61
+ }
44
62
  }
45
63
 
46
64
  module.exports = {
@@ -95,10 +95,11 @@ function normalizeNumber (number) {
95
95
  }
96
96
 
97
97
  function getGitHubEventPayload () {
98
- if (!getEnvironmentVariable('GITHUB_EVENT_PATH')) {
98
+ const path = getEnvironmentVariable('GITHUB_EVENT_PATH')
99
+ if (!path) {
99
100
  return
100
101
  }
101
- return JSON.parse(readFileSync(getEnvironmentVariable('GITHUB_EVENT_PATH'), 'utf8'))
102
+ return JSON.parse(readFileSync(path, 'utf8'))
102
103
  }
103
104
 
104
105
  module.exports = {
@@ -1,12 +1,22 @@
1
1
  'use strict'
2
2
 
3
- const { relative, sep } = require('path')
3
+ const { relative, sep, join } = require('path')
4
4
 
5
5
  const cwd = process.cwd()
6
6
 
7
7
  const NODE_MODULES_PATTERN_MIDDLE = `${sep}node_modules${sep}`
8
8
  const NODE_MODULES_PATTERN_START = `node_modules${sep}`
9
9
 
10
+ /**
11
+ * We detect if we're running inside the dd-trace-js repo by checking if the
12
+ * current file path ends with the expected path structure from the repo root.
13
+ * This is needed for local and CI where dd-trace-js is not in node_modules.
14
+ * In production, these frames are already filtered by isNodeModulesFrame.
15
+ */
16
+ const SHOULD_FILTER_DD_TRACE_INSTRUMENTAION = __filename.endsWith(
17
+ join(sep, 'dd-trace-js', 'packages', 'dd-trace', 'src', 'plugins', 'util', 'stacktrace.js')
18
+ )
19
+
10
20
  module.exports = {
11
21
  getCallSites,
12
22
  parseUserLandFrames
@@ -90,6 +100,7 @@ function parseLine (stack, start, end) {
90
100
  [fileName, lineNumber, columnNumber, index] = result
91
101
 
92
102
  if (isNodeModulesFrame(fileName)) return
103
+ if (SHOULD_FILTER_DD_TRACE_INSTRUMENTAION && isDDInstrumentationFile(fileName)) return
93
104
 
94
105
  // parse method name
95
106
  let methodName, functionName
@@ -153,6 +164,10 @@ function isNodeModulesFrame (fileName) {
153
164
  return relativePath.startsWith(NODE_MODULES_PATTERN_START) || relativePath.includes(NODE_MODULES_PATTERN_MIDDLE)
154
165
  }
155
166
 
167
+ function isDDInstrumentationFile (fileName) {
168
+ return fileName.includes(`packages${sep}datadog-instrumentations${sep}src`)
169
+ }
170
+
156
171
  /**
157
172
  * A stack trace location can be in one of the following formats:
158
173
  *
@@ -233,6 +233,7 @@
233
233
  "DD_TRACE_AZURE_EVENTHUBS_BATCH_LINKS_ENABLED": ["A"],
234
234
  "DD_TRACE_AZURE_FUNCTIONS_ENABLED": ["A"],
235
235
  "DD_TRACE_AZURE_SERVICE_BUS_ENABLED": ["A"],
236
+ "DD_TRACE_AZURE_SERVICEBUS_BATCH_LINKS_ENABLED": ["A"],
236
237
  "DD_TRACE_BAGGAGE_MAX_BYTES": ["A"],
237
238
  "DD_TRACE_BAGGAGE_MAX_ITEMS": ["A"],
238
239
  "DD_TRACE_BAGGAGE_TAG_KEYS": ["A"],
@@ -281,6 +282,7 @@
281
282
  "DD_TRACE_FASTIFY_ENABLED": ["A"],
282
283
  "DD_TRACE_FETCH_ENABLED": ["A"],
283
284
  "DD_TRACE_FIND_MY_WAY_ENABLED": ["A"],
285
+ "DD_TRACE_FLUSH_INTERVAL": ["A"],
284
286
  "DD_TRACE_FS_ENABLED": ["A"],
285
287
  "DD_TRACE_GENERIC_POOL_ENABLED": ["A"],
286
288
  "DD_TRACE_GIT_METADATA_ENABLED": ["A"],
@@ -4,6 +4,8 @@ const dc = require('dc-polyfill')
4
4
  const { sendData } = require('./send-data')
5
5
 
6
6
  const fastifyRouteCh = dc.channel('apm:fastify:route:added')
7
+ const expressRouteCh = dc.channel('apm:express:route:added')
8
+ const routerRouteCh = dc.channel('apm:router:route:added')
7
9
 
8
10
  let config
9
11
  let application
@@ -16,6 +18,7 @@ let updateRetryData
16
18
  * Map key is `${METHOD} ${PATH}`, value is { method, path }
17
19
  */
18
20
  const pendingEndpoints = new Map()
21
+ const wildcardEndpoints = new Set()
19
22
  let flushScheduled = false
20
23
  let isFirstPayload = true
21
24
 
@@ -42,12 +45,31 @@ function onFastifyRoute (routeData) {
42
45
  if (!routeOptions?.path) return
43
46
 
44
47
  const methods = Array.isArray(routeOptions.method) ? routeOptions.method : [routeOptions.method]
45
-
46
48
  for (const method of methods) {
47
49
  recordEndpoint(method, routeOptions.path)
48
50
  }
49
51
  }
50
52
 
53
+ function onExpressRoute ({ method, path }) {
54
+ if (!method || !path) return
55
+
56
+ // If wildcard already recorded for this path, skip specific methods
57
+ if (wildcardEndpoints.has(path)) return
58
+
59
+ recordEndpoint(method, path)
60
+
61
+ // If this is a wildcard event, record it and mark path as wildcarded
62
+ if (method === '*') {
63
+ wildcardEndpoints.add(path)
64
+ return
65
+ }
66
+
67
+ // Express automatically adds HEAD support for GET routes
68
+ if (method.toUpperCase() === 'GET') {
69
+ recordEndpoint('HEAD', path)
70
+ }
71
+ }
72
+
51
73
  function buildEndpointObjects (endpoints) {
52
74
  return endpoints.map(({ method, path }) => {
53
75
  return {
@@ -108,10 +130,14 @@ function start (_config = {}, _application, _host, getRetryDataFunction, updateR
108
130
  updateRetryData = updateRetryDataFunction
109
131
 
110
132
  fastifyRouteCh.subscribe(onFastifyRoute)
133
+ expressRouteCh.subscribe(onExpressRoute)
134
+ routerRouteCh.subscribe(onExpressRoute)
111
135
  }
112
136
 
113
137
  function stop () {
114
138
  fastifyRouteCh.unsubscribe(onFastifyRoute)
139
+ expressRouteCh.unsubscribe(onExpressRoute)
140
+ routerRouteCh.unsubscribe(onExpressRoute)
115
141
 
116
142
  pendingEndpoints.clear()
117
143
  flushScheduled = false
@@ -1,23 +1,26 @@
1
1
  'use strict'
2
2
 
3
- const activate = () => {
4
- const active = require('./telemetry')
3
+ let telemetry
5
4
 
6
- return Object.setPrototypeOf(module.exports, active)
7
- }
8
-
9
- const inactive = {
5
+ // Lazy load the telemetry module to avoid the performance impact of loading it unconditionally
6
+ module.exports = {
10
7
  start (config, ...args) {
11
- return config?.telemetry?.enabled && activate().start(config, ...args)
8
+ telemetry ??= require('./telemetry')
9
+ telemetry.start(config, ...args)
10
+ },
11
+ stop () {
12
+ telemetry?.stop()
12
13
  },
13
- stop () {},
14
14
  // This might be called before `start` so we have to trigger loading the
15
15
  // underlying module here as well.
16
16
  updateConfig (changes, config, ...args) {
17
- return config?.telemetry?.enabled && activate().updateConfig(changes, config, ...args)
17
+ telemetry ??= require('./telemetry')
18
+ telemetry.updateConfig(changes, config, ...args)
18
19
  },
19
- updateIntegrations () {},
20
- appClosing () {}
20
+ updateIntegrations () {
21
+ telemetry?.updateIntegrations()
22
+ },
23
+ appClosing () {
24
+ telemetry?.appClosing()
25
+ }
21
26
  }
22
-
23
- module.exports = Object.setPrototypeOf({}, inactive)
@@ -17,7 +17,9 @@ const nodeMajor = Number(process.versions.node.split('.')[0])
17
17
 
18
18
  const min = Number(requirePackageJson(path.join(__dirname, '..')).engines.node.match(/\d+/)[0])
19
19
 
20
- const hasIgnoreEngines = npmArgv &&
20
+ // Most package managers don't support `npm_config_argv`, so we need a custom
21
+ // flag to allow installing dd-trace on unsupported engines.
22
+ const hasIgnoreEngines = process.env._DD_IGNORE_ENGINES === 'true' || npmArgv &&
21
23
  npmArgv.original &&
22
24
  npmArgv.original.includes('--ignore-engines')
23
25
 
package/version.js CHANGED
@@ -13,5 +13,6 @@ module.exports = {
13
13
  DD_PATCH: parseInt(ddMatches[3]),
14
14
  NODE_MAJOR: parseInt(nodeMatches[1]),
15
15
  NODE_MINOR: parseInt(nodeMatches[2]),
16
- NODE_PATCH: parseInt(nodeMatches[3])
16
+ NODE_PATCH: parseInt(nodeMatches[3]),
17
+ NODE_VERSION: nodeMatches[0]
17
18
  }