dd-trace 4.18.0 → 4.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/LICENSE-3rdparty.csv +2 -2
  2. package/README.md +3 -3
  3. package/ext/kinds.d.ts +1 -0
  4. package/ext/kinds.js +2 -1
  5. package/ext/tags.d.ts +2 -1
  6. package/ext/tags.js +6 -1
  7. package/package.json +6 -6
  8. package/packages/datadog-core/src/storage/async_resource.js +1 -1
  9. package/packages/datadog-esbuild/index.js +1 -20
  10. package/packages/datadog-instrumentations/src/helpers/bundler-register.js +1 -2
  11. package/packages/datadog-instrumentations/src/helpers/instrument.js +1 -1
  12. package/packages/datadog-instrumentations/src/helpers/register.js +1 -1
  13. package/packages/datadog-instrumentations/src/restify.js +14 -1
  14. package/packages/datadog-plugin-kafkajs/src/consumer.js +8 -6
  15. package/packages/datadog-plugin-kafkajs/src/producer.js +9 -6
  16. package/packages/dd-trace/src/appsec/channels.js +1 -1
  17. package/packages/dd-trace/src/appsec/iast/iast-log.js +1 -1
  18. package/packages/dd-trace/src/appsec/iast/iast-plugin.js +1 -1
  19. package/packages/dd-trace/src/appsec/iast/index.js +1 -1
  20. package/packages/dd-trace/src/appsec/iast/path-line.js +1 -1
  21. package/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +1 -1
  22. package/packages/dd-trace/src/appsec/index.js +1 -1
  23. package/packages/dd-trace/src/appsec/recommended.json +272 -48
  24. package/packages/dd-trace/src/appsec/reporter.js +31 -34
  25. package/packages/dd-trace/src/appsec/rule_manager.js +9 -6
  26. package/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +17 -7
  27. package/packages/dd-trace/src/config.js +12 -5
  28. package/packages/dd-trace/src/datastreams/processor.js +60 -15
  29. package/packages/dd-trace/src/format.js +6 -1
  30. package/packages/dd-trace/src/id.js +12 -0
  31. package/packages/dd-trace/src/iitm.js +1 -1
  32. package/packages/dd-trace/src/log/channels.js +1 -1
  33. package/packages/dd-trace/src/opentelemetry/span.js +95 -2
  34. package/packages/dd-trace/src/opentelemetry/tracer.js +9 -10
  35. package/packages/dd-trace/src/opentracing/propagation/text_map.js +14 -5
  36. package/packages/dd-trace/src/opentracing/span.js +4 -0
  37. package/packages/dd-trace/src/opentracing/span_context.js +5 -2
  38. package/packages/dd-trace/src/plugin_manager.js +1 -1
  39. package/packages/dd-trace/src/plugins/database.js +1 -1
  40. package/packages/dd-trace/src/plugins/plugin.js +1 -1
  41. package/packages/dd-trace/src/plugins/util/ci.js +6 -19
  42. package/packages/dd-trace/src/plugins/util/git.js +2 -1
  43. package/packages/dd-trace/src/plugins/util/ip_extractor.js +7 -6
  44. package/packages/dd-trace/src/plugins/util/url.js +26 -0
  45. package/packages/dd-trace/src/plugins/util/user-provided-git.js +1 -14
  46. package/packages/dd-trace/src/profiling/config.js +18 -2
  47. package/packages/dd-trace/src/profiling/profilers/events.js +166 -0
  48. package/packages/dd-trace/src/profiling/profilers/shared.js +9 -0
  49. package/packages/dd-trace/src/profiling/profilers/wall.js +116 -58
  50. package/packages/dd-trace/src/ritm.js +1 -1
  51. package/packages/dd-trace/src/span_processor.js +4 -0
  52. package/packages/dd-trace/src/telemetry/dependencies.js +1 -1
  53. package/packages/dd-trace/src/telemetry/index.js +1 -1
  54. package/packages/dd-trace/src/telemetry/logs/index.js +1 -1
  55. package/packages/dd-trace/src/tracer.js +4 -2
  56. package/packages/diagnostics_channel/index.js +0 -3
  57. package/packages/diagnostics_channel/src/index.js +0 -121
@@ -235,8 +235,9 @@ class Config {
235
235
 
236
236
  const inServerlessEnvironment = inAWSLambda || isGCPFunction || isAzureFunctionConsumptionPlan
237
237
 
238
- const DD_TRACE_TELEMETRY_ENABLED = coalesce(
239
- process.env.DD_TRACE_TELEMETRY_ENABLED,
238
+ const DD_INSTRUMENTATION_TELEMETRY_ENABLED = coalesce(
239
+ process.env.DD_TRACE_TELEMETRY_ENABLED, // for backward compatibility
240
+ process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED, // to comply with instrumentation telemetry specs
240
241
  !inServerlessEnvironment
241
242
  )
242
243
  const DD_TELEMETRY_HEARTBEAT_INTERVAL = process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL
@@ -308,6 +309,10 @@ class Config {
308
309
  options.tracePropagationStyle,
309
310
  defaultPropagationStyle
310
311
  )
312
+ const DD_TRACE_PROPAGATION_EXTRACT_FIRST = coalesce(
313
+ process.env.DD_TRACE_PROPAGATION_EXTRACT_FIRST,
314
+ false
315
+ )
311
316
  const DD_TRACE_RUNTIME_ID_ENABLED = coalesce(
312
317
  options.experimental && options.experimental.runtimeId,
313
318
  process.env.DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED,
@@ -367,10 +372,11 @@ class Config {
367
372
  isGCPFunction || isAzureFunctionConsumptionPlan
368
373
  )
369
374
 
375
+ // the tracer generates 128 bit IDs by default as of v5
370
376
  const DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = coalesce(
371
377
  options.traceId128BitGenerationEnabled,
372
378
  process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED,
373
- false
379
+ true
374
380
  )
375
381
 
376
382
  const DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = coalesce(
@@ -577,6 +583,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)
577
583
  inject: DD_TRACE_PROPAGATION_STYLE_INJECT,
578
584
  extract: DD_TRACE_PROPAGATION_STYLE_EXTRACT
579
585
  }
586
+ this.tracePropagationExtractFirst = isTrue(DD_TRACE_PROPAGATION_EXTRACT_FIRST)
580
587
  this.experimental = {
581
588
  runtimeId: isTrue(DD_TRACE_RUNTIME_ID_ENABLED),
582
589
  exporter: DD_TRACE_EXPORTER,
@@ -598,7 +605,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)
598
605
  this.startupLogs = isTrue(DD_TRACE_STARTUP_LOGS)
599
606
  // Disabled for CI Visibility's agentless
600
607
  this.telemetry = {
601
- enabled: DD_TRACE_EXPORTER !== 'datadog' && isTrue(DD_TRACE_TELEMETRY_ENABLED),
608
+ enabled: DD_TRACE_EXPORTER !== 'datadog' && isTrue(DD_INSTRUMENTATION_TELEMETRY_ENABLED),
602
609
  heartbeatInterval: DD_TELEMETRY_HEARTBEAT_INTERVAL,
603
610
  debug: isTrue(DD_TELEMETRY_DEBUG),
604
611
  logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED),
@@ -608,7 +615,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)
608
615
  this.tagsHeaderMaxLength = parseInt(DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH)
609
616
  this.appsec = {
610
617
  enabled: DD_APPSEC_ENABLED,
611
- rules: DD_APPSEC_RULES ? safeJsonParse(maybeFile(DD_APPSEC_RULES)) : require('./appsec/recommended.json'),
618
+ rules: DD_APPSEC_RULES,
612
619
  customRulesProvided: !!DD_APPSEC_RULES,
613
620
  rateLimit: DD_APPSEC_TRACE_RATE_LIMIT,
614
621
  wafTimeout: DD_APPSEC_WAF_TIMEOUT,
@@ -4,12 +4,16 @@ const pkg = require('../../../../package.json')
4
4
  const Uint64 = require('int64-buffer').Uint64BE
5
5
 
6
6
  const { LogCollapsingLowestDenseDDSketch } = require('@datadog/sketches-js')
7
-
7
+ const { encodePathwayContext } = require('./pathway')
8
8
  const { DataStreamsWriter } = require('./writer')
9
9
  const { computePathwayHash } = require('./pathway')
10
+ const { types } = require('util')
11
+ const { PATHWAY_HASH } = require('../../../../ext/tags')
12
+
10
13
  const ENTRY_PARENT_HASH = Buffer.from('0000000000000000', 'hex')
11
14
 
12
15
  const HIGH_ACCURACY_DISTRIBUTION = 0.0075
16
+ const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx'
13
17
 
14
18
  class StatsPoint {
15
19
  constructor (hash, parentHash, edgeTags) {
@@ -18,6 +22,7 @@ class StatsPoint {
18
22
  this.edgeTags = edgeTags
19
23
  this.edgeLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION)
20
24
  this.pathwayLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION)
25
+ this.payloadSize = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION)
21
26
  }
22
27
 
23
28
  addLatencies (checkpoint) {
@@ -25,6 +30,7 @@ class StatsPoint {
25
30
  const pathwayLatencySec = checkpoint.pathwayLatencyNs / 1e9
26
31
  this.edgeLatency.accept(edgeLatencySec)
27
32
  this.pathwayLatency.accept(pathwayLatencySec)
33
+ this.payloadSize.accept(checkpoint.payloadSize)
28
34
  }
29
35
 
30
36
  encode () {
@@ -33,7 +39,8 @@ class StatsPoint {
33
39
  ParentHash: this.parentHash,
34
40
  EdgeTags: this.edgeTags,
35
41
  EdgeLatency: this.edgeLatency.toProto(),
36
- PathwayLatency: this.pathwayLatency.toProto()
42
+ PathwayLatency: this.pathwayLatency.toProto(),
43
+ PayloadSize: this.payloadSize.toProto()
37
44
  }
38
45
  }
39
46
  }
@@ -49,6 +56,29 @@ class StatsBucket extends Map {
49
56
  }
50
57
  }
51
58
 
59
+ function getSizeOrZero (obj) {
60
+ if (typeof obj === 'string') {
61
+ return Buffer.from(obj, 'utf-8').length
62
+ }
63
+ if (types.isArrayBuffer(obj)) {
64
+ return obj.byteLength
65
+ }
66
+ if (Buffer.isBuffer(obj)) {
67
+ return obj.length
68
+ }
69
+ return 0
70
+ }
71
+
72
+ function getHeadersSize (headers) {
73
+ if (headers === undefined) return 0
74
+ return Object.entries(headers).reduce((prev, [key, val]) => getSizeOrZero(key) + getSizeOrZero(val) + prev, 0)
75
+ }
76
+
77
+ function getMessageSize (message) {
78
+ const { key, value, headers } = message
79
+ return getSizeOrZero(key) + getSizeOrZero(value) + getHeadersSize(headers)
80
+ }
81
+
52
82
  class TimeBuckets extends Map {
53
83
  forTime (time) {
54
84
  if (!this.has(time)) {
@@ -105,15 +135,19 @@ class DataStreamsProcessor {
105
135
  this.writer.flush(payload)
106
136
  }
107
137
 
108
- recordCheckpoint (checkpoint) {
138
+ recordCheckpoint (checkpoint, span = null) {
109
139
  if (!this.enabled) return
110
140
  const bucketTime = Math.round(checkpoint.currentTimestamp - (checkpoint.currentTimestamp % this.bucketSizeNs))
111
141
  this.buckets.forTime(bucketTime)
112
142
  .forCheckpoint(checkpoint)
113
143
  .addLatencies(checkpoint)
144
+ // set DSM pathway hash on span to enable related traces feature on DSM tab, convert from buffer to uint64
145
+ if (span) {
146
+ span.setTag(PATHWAY_HASH, checkpoint.hash.readBigUInt64BE(0).toString())
147
+ }
114
148
  }
115
149
 
116
- setCheckpoint (edgeTags, ctx = null) {
150
+ setCheckpoint (edgeTags, span, ctx = null, payloadSize = 0) {
117
151
  if (!this.enabled) return null
118
152
  const nowNs = Date.now() * 1e6
119
153
  const direction = edgeTags.find(t => t.startsWith('direction:'))
@@ -147,16 +181,7 @@ class DataStreamsProcessor {
147
181
  const hash = computePathwayHash(this.service, this.env, edgeTags, parentHash)
148
182
  const edgeLatencyNs = nowNs - edgeStartNs
149
183
  const pathwayLatencyNs = nowNs - pathwayStartNs
150
- const checkpoint = {
151
- currentTimestamp: nowNs,
152
- parentHash: parentHash,
153
- hash: hash,
154
- edgeTags: edgeTags,
155
- edgeLatencyNs: edgeLatencyNs,
156
- pathwayLatencyNs: pathwayLatencyNs
157
- }
158
- this.recordCheckpoint(checkpoint)
159
- return {
184
+ const dataStreamsContext = {
160
185
  hash: hash,
161
186
  edgeStartNs: edgeStartNs,
162
187
  pathwayStartNs: pathwayStartNs,
@@ -164,6 +189,22 @@ class DataStreamsProcessor {
164
189
  closestOppositeDirectionHash: closestOppositeDirectionHash,
165
190
  closestOppositeDirectionEdgeStart: closestOppositeDirectionEdgeStart
166
191
  }
192
+ if (direction === 'direction:out') {
193
+ // Add the header for this now, as the callee doesn't have access to context when producing
194
+ payloadSize += getSizeOrZero(encodePathwayContext(dataStreamsContext))
195
+ payloadSize += CONTEXT_PROPAGATION_KEY.length
196
+ }
197
+ const checkpoint = {
198
+ currentTimestamp: nowNs,
199
+ parentHash: parentHash,
200
+ hash: hash,
201
+ edgeTags: edgeTags,
202
+ edgeLatencyNs: edgeLatencyNs,
203
+ pathwayLatencyNs: pathwayLatencyNs,
204
+ payloadSize: payloadSize
205
+ }
206
+ this.recordCheckpoint(checkpoint, span)
207
+ return dataStreamsContext
167
208
  }
168
209
 
169
210
  _serializeBuckets () {
@@ -194,5 +235,9 @@ module.exports = {
194
235
  StatsPoint: StatsPoint,
195
236
  StatsBucket: StatsBucket,
196
237
  TimeBuckets,
197
- ENTRY_PARENT_HASH
238
+ getMessageSize,
239
+ getHeadersSize,
240
+ getSizeOrZero,
241
+ ENTRY_PARENT_HASH,
242
+ CONTEXT_PROPAGATION_KEY
198
243
  }
@@ -14,7 +14,7 @@ const SPAN_SAMPLING_MECHANISM = constants.SPAN_SAMPLING_MECHANISM
14
14
  const SPAN_SAMPLING_RULE_RATE = constants.SPAN_SAMPLING_RULE_RATE
15
15
  const SPAN_SAMPLING_MAX_PER_SECOND = constants.SPAN_SAMPLING_MAX_PER_SECOND
16
16
  const SAMPLING_MECHANISM_SPAN = constants.SAMPLING_MECHANISM_SPAN
17
- const { MEASURED, BASE_SERVICE } = tags
17
+ const { MEASURED, BASE_SERVICE, ANALYTICS } = tags
18
18
  const ORIGIN_KEY = constants.ORIGIN_KEY
19
19
  const HOSTNAME_KEY = constants.HOSTNAME_KEY
20
20
  const TOP_LEVEL_KEY = constants.TOP_LEVEL_KEY
@@ -24,6 +24,7 @@ const ERROR_STACK = constants.ERROR_STACK
24
24
  const ERROR_TYPE = constants.ERROR_TYPE
25
25
 
26
26
  const map = {
27
+ 'operation.name': 'name',
27
28
  'service.name': 'service',
28
29
  'span.type': 'type',
29
30
  'resource.name': 'resource'
@@ -83,6 +84,7 @@ function extractTags (trace, span) {
83
84
 
84
85
  for (const tag in tags) {
85
86
  switch (tag) {
87
+ case 'operation.name':
86
88
  case 'service.name':
87
89
  case 'span.type':
88
90
  case 'resource.name':
@@ -92,6 +94,9 @@ function extractTags (trace, span) {
92
94
  case 'http.status_code':
93
95
  addTag(trace.meta, {}, tag, tags[tag] && String(tags[tag]))
94
96
  break
97
+ case 'analytics.event':
98
+ addTag({}, trace.metrics, ANALYTICS, tags[tag] === undefined || tags[tag] ? 1 : 0)
99
+ break
95
100
  case HOSTNAME_KEY:
96
101
  case MEASURED:
97
102
  addTag({}, trace.metrics, tag, tags[tag] === undefined || tags[tag] ? 1 : 0)
@@ -42,6 +42,18 @@ class Identifier {
42
42
  toJSON () {
43
43
  return this.toString()
44
44
  }
45
+
46
+ equals (other) {
47
+ const length = this._buffer.length
48
+ const otherLength = other._buffer.length
49
+
50
+ // Only compare the bytes available in both IDs.
51
+ for (let i = length, j = otherLength; i >= 0 && j >= 0; i--, j--) {
52
+ if (this._buffer[i] !== other._buffer[j]) return false
53
+ }
54
+
55
+ return true
56
+ }
45
57
  }
46
58
 
47
59
  // Create a buffer, using an optional hexadecimal value if provided.
@@ -3,7 +3,7 @@
3
3
  const semver = require('semver')
4
4
  const logger = require('./log')
5
5
  const { addHook } = require('import-in-the-middle')
6
- const dc = require('../../diagnostics_channel')
6
+ const dc = require('dc-polyfill')
7
7
 
8
8
  if (semver.satisfies(process.versions.node, '>=14.13.1')) {
9
9
  const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart')
@@ -1,6 +1,6 @@
1
1
  'use strict'
2
2
 
3
- const { channel } = require('../../../diagnostics_channel')
3
+ const { channel } = require('dc-polyfill')
4
4
 
5
5
  const Level = {
6
6
  Debug: 'debug',
@@ -11,6 +11,7 @@ const tracer = require('../../')
11
11
  const DatadogSpan = require('../opentracing/span')
12
12
  const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../constants')
13
13
  const { SERVICE_NAME, RESOURCE_NAME } = require('../../../../ext/tags')
14
+ const kinds = require('../../../../ext/kinds')
14
15
 
15
16
  const SpanContext = require('./span_context')
16
17
 
@@ -19,6 +20,93 @@ function hrTimeToMilliseconds (time) {
19
20
  return time[0] * 1e3 + time[1] / 1e6
20
21
  }
21
22
 
23
+ const spanKindNames = {
24
+ [api.SpanKind.INTERNAL]: kinds.INTERNAL,
25
+ [api.SpanKind.SERVER]: kinds.SERVER,
26
+ [api.SpanKind.CLIENT]: kinds.CLIENT,
27
+ [api.SpanKind.PRODUCER]: kinds.PRODUCER,
28
+ [api.SpanKind.CONSUMER]: kinds.CONSUMER
29
+ }
30
+
31
+ /**
32
+ * Several of these attributes are not yet supported by the Node.js OTel API.
33
+ * We check for old equivalents where we can, but not all had equivalents.
34
+ */
35
+ function spanNameMapper (spanName, kind, attributes) {
36
+ if (spanName) return spanName
37
+
38
+ const opName = attributes['operation.name']
39
+ if (opName) return opName
40
+
41
+ const { INTERNAL, SERVER, CLIENT } = api.SpanKind
42
+
43
+ // HTTP server and client requests
44
+ // TODO: Drop http.method when http.request.method is supported.
45
+ for (const key of ['http.method', 'http.request.method']) {
46
+ if (key in attributes) {
47
+ if (kind === SERVER) {
48
+ return 'http.server.request'
49
+ }
50
+ if (kind === CLIENT) {
51
+ return 'http.client.request'
52
+ }
53
+ }
54
+ }
55
+
56
+ // Databases
57
+ const dbSystem = attributes['db.system']
58
+ if (dbSystem && kind === CLIENT) {
59
+ return `${dbSystem}.query`
60
+ }
61
+
62
+ // Messaging
63
+ const msgSys = attributes['messaging.system']
64
+ const msgOp = attributes['messaging.operation']
65
+ if (msgSys && msgOp && kind !== INTERNAL) {
66
+ return `${msgSys}.${msgOp}`
67
+ }
68
+
69
+ // RPC (and AWS)
70
+ const rpcSystem = attributes['rpc.system']
71
+ if (rpcSystem) {
72
+ if (kind === CLIENT) {
73
+ return rpcSystem === 'aws-api'
74
+ ? `aws.${attributes['rpc.service'] || 'client'}.request`
75
+ : `${rpcSystem}.client.request`
76
+ }
77
+ if (kind === SERVER) {
78
+ return `${rpcSystem}.server.request`
79
+ }
80
+ }
81
+
82
+ // FaaS
83
+ const faasProvider = attributes['faas.invoked_provider']
84
+ const faasName = attributes['faas.invoked_name']
85
+ const faasTrigger = attributes['faas.trigger']
86
+ if (kind === CLIENT && faasProvider && faasName) {
87
+ return `${faasProvider}.${faasName}.invoke`
88
+ }
89
+ if (kind === SERVER && faasTrigger) {
90
+ return `${faasTrigger}.invoke`
91
+ }
92
+
93
+ // GraphQL
94
+ // NOTE: Not part of Semantic Convention spec yet, but is used in the GraphQL
95
+ // integration.
96
+ const isGraphQL = 'graphql.operation.type' in attributes
97
+ if (isGraphQL) return 'graphql.server.request'
98
+
99
+ // Network
100
+ // TODO: Doesn't exist yet. No equivalent.
101
+ const protocol = attributes['network.protocol.name']
102
+ const protocolPrefix = protocol ? `${protocol}.` : ''
103
+ if (kind === SERVER) return `${protocolPrefix}server.request`
104
+ if (kind === CLIENT) return `${protocolPrefix}client.request`
105
+
106
+ // If all else fails, default to stringified span.kind.
107
+ return spanKindNames[kind]
108
+ }
109
+
22
110
  class Span {
23
111
  constructor (
24
112
  parentTracer,
@@ -27,7 +115,8 @@ class Span {
27
115
  spanContext,
28
116
  kind,
29
117
  links = [],
30
- timeInput
118
+ timeInput,
119
+ attributes
31
120
  ) {
32
121
  const { _tracer } = tracer
33
122
 
@@ -35,7 +124,7 @@ class Span {
35
124
  const startTime = hrTimeToMilliseconds(hrStartTime)
36
125
 
37
126
  this._ddSpan = new DatadogSpan(_tracer, _tracer._processor, _tracer._prioritySampler, {
38
- operationName: spanName,
127
+ operationName: spanNameMapper(spanName, kind, attributes),
39
128
  context: spanContext._ddContext,
40
129
  startTime,
41
130
  hostname: _tracer._hostname,
@@ -46,6 +135,10 @@ class Span {
46
135
  }
47
136
  }, _tracer._debug)
48
137
 
138
+ if (attributes) {
139
+ this.setAttributes(attributes)
140
+ }
141
+
49
142
  this._parentTracer = parentTracer
50
143
  this._context = context
51
144
 
@@ -78,23 +78,22 @@ class Tracer {
78
78
  // return api.trace.wrapSpanContext(spanContext)
79
79
  // }
80
80
 
81
- const span = new Span(
81
+ return new Span(
82
82
  this,
83
83
  context,
84
84
  name,
85
85
  spanContext,
86
86
  spanKind,
87
87
  links,
88
- options.startTime
88
+ options.startTime,
89
+
90
+ // Set initial span attributes. The attributes object may have been mutated
91
+ // by the sampler, so we sanitize the merged attributes before setting them.
92
+ sanitizeAttributes(
93
+ // Object.assign(attributes, samplingResult.attributes)
94
+ attributes
95
+ )
89
96
  )
90
- // Set initial span attributes. The attributes object may have been mutated
91
- // by the sampler, so we sanitize the merged attributes before setting them.
92
- const initAttributes = sanitizeAttributes(
93
- // Object.assign(attributes, samplingResult.attributes)
94
- attributes
95
- )
96
- span.setAttributes(initAttributes)
97
- return span
98
97
  }
99
98
 
100
99
  startActiveSpan (name, options, context, fn) {
@@ -236,11 +236,20 @@ class TextMapPropagator {
236
236
  _extractDatadogContext (carrier) {
237
237
  const spanContext = this._extractGenericContext(carrier, traceKey, spanKey, 10)
238
238
 
239
- if (spanContext) {
240
- this._extractOrigin(carrier, spanContext)
241
- this._extractBaggageItems(carrier, spanContext)
242
- this._extractSamplingPriority(carrier, spanContext)
243
- this._extractTags(carrier, spanContext)
239
+ if (!spanContext) return spanContext
240
+
241
+ this._extractOrigin(carrier, spanContext)
242
+ this._extractBaggageItems(carrier, spanContext)
243
+ this._extractSamplingPriority(carrier, spanContext)
244
+ this._extractTags(carrier, spanContext)
245
+
246
+ if (this._config.tracePropagationExtractFirst) return spanContext
247
+
248
+ const tc = this._extractTraceparentContext(carrier)
249
+
250
+ if (tc && spanContext._traceId.equals(tc._traceId)) {
251
+ spanContext._traceparent = tc._traceparent
252
+ spanContext._tracestate = tc._tracestate
244
253
  }
245
254
 
246
255
  return spanContext
@@ -12,6 +12,7 @@ const runtimeMetrics = require('../runtime_metrics')
12
12
  const log = require('../log')
13
13
  const { storage } = require('../../../datadog-core')
14
14
  const telemetryMetrics = require('../telemetry/metrics')
15
+ const { channel } = require('dc-polyfill')
15
16
 
16
17
  const tracerMetrics = telemetryMetrics.manager.namespace('tracers')
17
18
 
@@ -30,6 +31,8 @@ const integrationCounters = {
30
31
  span_finished: {}
31
32
  }
32
33
 
34
+ const finishCh = channel('dd-trace:span:finish')
35
+
33
36
  function getIntegrationCounter (event, integration) {
34
37
  const counters = integrationCounters[event]
35
38
 
@@ -176,6 +179,7 @@ class DatadogSpan {
176
179
  this._duration = finishTime - this._startTime
177
180
  this._spanContext._trace.finished.push(this)
178
181
  this._spanContext._isFinished = true
182
+ finishCh.publish(this)
179
183
  this._processor.process(this)
180
184
  }
181
185
 
@@ -2,6 +2,9 @@
2
2
 
3
3
  const { AUTO_KEEP } = require('../../../../ext/priority')
4
4
 
5
+ // the lowercase, hex encoded upper 64 bits of a 128-bit trace id, if present
6
+ const TRACE_ID_128 = '_dd.p.tid'
7
+
5
8
  class DatadogSpanContext {
6
9
  constructor (props) {
7
10
  props = props || {}
@@ -35,8 +38,8 @@ class DatadogSpanContext {
35
38
 
36
39
  toTraceparent () {
37
40
  const flags = this._sampling.priority >= AUTO_KEEP ? '01' : '00'
38
- const traceId = this._traceId.toBuffer().length <= 8 && this._trace.tags['_dd.p.tid']
39
- ? this._trace.tags['_dd.p.tid'] + this._traceId.toString(16).padStart(16, '0')
41
+ const traceId = this._traceId.toBuffer().length <= 8 && this._trace.tags[TRACE_ID_128]
42
+ ? this._trace.tags[TRACE_ID_128] + this._traceId.toString(16).padStart(16, '0')
40
43
  : this._traceId.toString(16).padStart(32, '0')
41
44
  const spanId = this._spanId.toString(16).padStart(16, '0')
42
45
  const version = (this._traceparent && this._traceparent.version) || '00'
@@ -1,6 +1,6 @@
1
1
  'use strict'
2
2
 
3
- const { channel } = require('../../diagnostics_channel')
3
+ const { channel } = require('dc-polyfill')
4
4
  const { isFalse } = require('./util')
5
5
  const plugins = require('./plugins')
6
6
  const log = require('./log')
@@ -36,7 +36,7 @@ class DatabasePlugin extends StoragePlugin {
36
36
  const { encodedDddbs, encodedDde, encodedDdps, encodedDdpv } = this.serviceTags
37
37
 
38
38
  return `dddbs='${encodedDddbs}',dde='${encodedDde}',` +
39
- `ddps='${encodedDdps}',ddpv='${encodedDdpv}'`
39
+ `ddps='${encodedDdps}',ddpv='${encodedDdpv}'`
40
40
  }
41
41
 
42
42
  getDbmServiceName (span, tracerService) {
@@ -2,7 +2,7 @@
2
2
 
3
3
  // TODO: move anything related to tracing to TracingPlugin instead
4
4
 
5
- const dc = require('../../../diagnostics_channel')
5
+ const dc = require('dc-polyfill')
6
6
  const { storage } = require('../../../datadog-core')
7
7
 
8
8
  class Subscription {
@@ -1,5 +1,3 @@
1
- const URL = require('url').URL
2
-
3
1
  const {
4
2
  GIT_BRANCH,
5
3
  GIT_COMMIT_SHA,
@@ -24,6 +22,7 @@ const {
24
22
  CI_NODE_LABELS,
25
23
  CI_NODE_NAME
26
24
  } = require('./tags')
25
+ const { filterSensitiveInfoFromRepository } = require('./url')
27
26
 
28
27
  // Receives a string with the form 'John Doe <john.doe@gmail.com>'
29
28
  // and returns { name: 'John Doe', email: 'john.doe@gmail.com' }
@@ -67,20 +66,6 @@ function normalizeRef (ref) {
67
66
  return ref.replace(/origin\/|refs\/heads\/|tags\//gm, '')
68
67
  }
69
68
 
70
- function filterSensitiveInfoFromRepository (repositoryUrl) {
71
- if (repositoryUrl.startsWith('git@')) {
72
- return repositoryUrl
73
- }
74
-
75
- try {
76
- const { protocol, hostname, pathname } = new URL(repositoryUrl)
77
-
78
- return `${protocol}//${hostname}${pathname}`
79
- } catch (e) {
80
- return ''
81
- }
82
- }
83
-
84
69
  function resolveTilde (filePath) {
85
70
  if (!filePath || typeof filePath !== 'string') {
86
71
  return ''
@@ -271,20 +256,22 @@ module.exports = {
271
256
  const ref = GITHUB_HEAD_REF || GITHUB_REF || ''
272
257
  const refKey = ref.includes('tags/') ? GIT_TAG : GIT_BRANCH
273
258
 
259
+ // Both pipeline URL and job URL include GITHUB_SERVER_URL, which can include user credentials,
260
+ // so we pass them through `filterSensitiveInfoFromRepository`.
274
261
  tags = {
275
262
  [CI_PIPELINE_ID]: GITHUB_RUN_ID,
276
263
  [CI_PIPELINE_NAME]: GITHUB_WORKFLOW,
277
264
  [CI_PIPELINE_NUMBER]: GITHUB_RUN_NUMBER,
278
- [CI_PIPELINE_URL]: pipelineURL,
265
+ [CI_PIPELINE_URL]: filterSensitiveInfoFromRepository(pipelineURL),
279
266
  [CI_PROVIDER_NAME]: 'github',
280
267
  [GIT_COMMIT_SHA]: GITHUB_SHA,
281
268
  [GIT_REPOSITORY_URL]: repositoryURL,
282
- [CI_JOB_URL]: jobUrl,
269
+ [CI_JOB_URL]: filterSensitiveInfoFromRepository(jobUrl),
283
270
  [CI_JOB_NAME]: GITHUB_JOB,
284
271
  [CI_WORKSPACE_PATH]: GITHUB_WORKSPACE,
285
272
  [refKey]: ref,
286
273
  [CI_ENV_VARS]: JSON.stringify({
287
- GITHUB_SERVER_URL,
274
+ GITHUB_SERVER_URL: filterSensitiveInfoFromRepository(GITHUB_SERVER_URL),
288
275
  GITHUB_REPOSITORY,
289
276
  GITHUB_RUN_ID,
290
277
  GITHUB_RUN_ATTEMPT
@@ -19,6 +19,7 @@ const {
19
19
  GIT_COMMIT_AUTHOR_NAME,
20
20
  CI_WORKSPACE_PATH
21
21
  } = require('./tags')
22
+ const { filterSensitiveInfoFromRepository } = require('./url')
22
23
 
23
24
  const GIT_REV_LIST_MAX_BUFFER = 8 * 1024 * 1024 // 8MB
24
25
 
@@ -214,7 +215,7 @@ function getGitMetadata (ciMetadata) {
214
215
 
215
216
  return {
216
217
  [GIT_REPOSITORY_URL]:
217
- repositoryUrl || sanitizedExec('git', ['ls-remote', '--get-url']),
218
+ filterSensitiveInfoFromRepository(repositoryUrl || sanitizedExec('git', ['ls-remote', '--get-url'])),
218
219
  [GIT_COMMIT_MESSAGE]:
219
220
  commitMessage || sanitizedExec('git', ['show', '-s', '--format=%s']),
220
221
  [GIT_COMMIT_AUTHOR_DATE]: authorDate,
@@ -48,8 +48,8 @@ function extractIp (config, req) {
48
48
 
49
49
  let firstPrivateIp
50
50
  if (headers) {
51
- for (let i = 0; i < ipHeaderList.length; i++) {
52
- const firstIp = findFirstIp(headers[ipHeaderList[i]])
51
+ for (const ipHeaderName of ipHeaderList) {
52
+ const firstIp = findFirstIp(headers[ipHeaderName])
53
53
 
54
54
  if (firstIp.public) {
55
55
  return firstIp.public
@@ -59,7 +59,7 @@ function extractIp (config, req) {
59
59
  }
60
60
  }
61
61
 
62
- return firstPrivateIp || (req.socket && req.socket.remoteAddress)
62
+ return firstPrivateIp || req.socket?.remoteAddress
63
63
  }
64
64
 
65
65
  function findFirstIp (str) {
@@ -68,8 +68,8 @@ function findFirstIp (str) {
68
68
 
69
69
  const splitted = str.split(',')
70
70
 
71
- for (let i = 0; i < splitted.length; i++) {
72
- const chunk = splitted[i].trim()
71
+ for (const part of splitted) {
72
+ const chunk = part.trim()
73
73
 
74
74
  // TODO: strip port and interface data ?
75
75
 
@@ -90,5 +90,6 @@ function findFirstIp (str) {
90
90
  }
91
91
 
92
92
  module.exports = {
93
- extractIp
93
+ extractIp,
94
+ ipHeaderList
94
95
  }