dd-trace 5.100.0 → 5.101.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +14 -0
- package/package.json +5 -5
- package/packages/datadog-instrumentations/src/cypress.js +5 -3
- package/packages/datadog-instrumentations/src/http/client.js +20 -3
- package/packages/datadog-instrumentations/src/jest.js +62 -32
- package/packages/datadog-instrumentations/src/mocha/common.js +4 -1
- package/packages/datadog-instrumentations/src/mocha/main.js +25 -4
- package/packages/datadog-instrumentations/src/mocha/worker.js +5 -2
- package/packages/datadog-instrumentations/src/otel-sdk-trace.js +11 -6
- package/packages/datadog-plugin-bullmq/src/consumer.js +2 -2
- package/packages/datadog-plugin-bullmq/src/producer.js +14 -20
- package/packages/datadog-plugin-cypress/src/cypress-plugin.js +17 -0
- package/packages/datadog-plugin-cypress/src/plugin.js +5 -14
- package/packages/datadog-plugin-kafkajs/src/consumer.js +2 -9
- package/packages/datadog-plugin-kafkajs/src/producer.js +2 -8
- package/packages/dd-trace/src/appsec/reporter.js +4 -1
- package/packages/dd-trace/src/ci-visibility/lage.js +2 -1
- package/packages/dd-trace/src/ci-visibility/requests/request.js +11 -33
- package/packages/dd-trace/src/config/config-types.d.ts +0 -2
- package/packages/dd-trace/src/config/index.js +1 -55
- package/packages/dd-trace/src/datastreams/checkpointer.js +4 -10
- package/packages/dd-trace/src/datastreams/encoding.js +39 -28
- package/packages/dd-trace/src/datastreams/pathway.js +29 -26
- package/packages/dd-trace/src/datastreams/processor.js +17 -15
- package/packages/dd-trace/src/datastreams/size.js +6 -2
- package/packages/dd-trace/src/debugger/config.js +5 -2
- package/packages/dd-trace/src/debugger/devtools_client/index.js +2 -5
- package/packages/dd-trace/src/debugger/devtools_client/send.js +2 -1
- package/packages/dd-trace/src/dogstatsd.js +10 -7
- package/packages/dd-trace/src/encode/0.4.js +2 -2
- package/packages/dd-trace/src/encode/0.5.js +2 -2
- package/packages/dd-trace/src/encode/agentless-json.js +2 -2
- package/packages/dd-trace/src/encode/tags-processors.js +2 -27
- package/packages/dd-trace/src/exporters/common/request.js +22 -11
- package/packages/dd-trace/src/exporters/common/retry.js +104 -0
- package/packages/dd-trace/src/git_metadata.js +66 -0
- package/packages/dd-trace/src/git_metadata_tagger.js +13 -5
- package/packages/dd-trace/src/id.js +15 -26
- package/packages/dd-trace/src/llmobs/constants/tags.js +2 -0
- package/packages/dd-trace/src/llmobs/plugins/anthropic/index.js +27 -16
- package/packages/dd-trace/src/llmobs/plugins/anthropic/util.js +3 -0
- package/packages/dd-trace/src/llmobs/plugins/genai/util.js +30 -13
- package/packages/dd-trace/src/llmobs/plugins/openai/index.js +20 -50
- package/packages/dd-trace/src/llmobs/sdk.js +5 -1
- package/packages/dd-trace/src/llmobs/span_processor.js +28 -2
- package/packages/dd-trace/src/llmobs/tagger.js +42 -0
- package/packages/dd-trace/src/llmobs/telemetry.js +29 -0
- package/packages/dd-trace/src/llmobs/util.js +80 -5
- package/packages/dd-trace/src/opentelemetry/active-span-proxy.js +42 -0
- package/packages/dd-trace/src/opentelemetry/bridge-span-base.js +106 -0
- package/packages/dd-trace/src/opentelemetry/context_manager.js +11 -2
- package/packages/dd-trace/src/opentelemetry/span-helpers.js +188 -50
- package/packages/dd-trace/src/opentelemetry/span.js +42 -80
- package/packages/dd-trace/src/opentracing/propagation/text_map.js +65 -27
- package/packages/dd-trace/src/opentracing/propagation/tracestate.js +58 -22
- package/packages/dd-trace/src/opentracing/span.js +56 -48
- package/packages/dd-trace/src/opentracing/span_context.js +1 -0
- package/packages/dd-trace/src/priority_sampler.js +6 -4
- package/packages/dd-trace/src/profiling/config.js +5 -4
- package/packages/dd-trace/src/remote_config/index.js +5 -3
- package/packages/dd-trace/src/span_format.js +52 -5
- package/packages/dd-trace/src/span_processor.js +0 -4
- package/packages/dd-trace/src/spanleak.js +0 -1
- package/packages/dd-trace/src/util.js +17 -0
@@ -23,22 +23,13 @@ const noopTask = {
 module.exports = function CypressPlugin (on, config) {
   const tracer = require('../../dd-trace')
 
-  if (satisfies(config.version, '<10.2.0')) {
-    if (DD_MAJOR >= 6) {
-      // eslint-disable-next-line no-console
-      console.error(
-        'ERROR: dd-trace v6 has deleted support for Cypress<10.2.0.'
-      )
-      on('task', noopTask)
-      return config
-    }
-
-    // console.warn does not seem to work in cypress, so using console.log instead
+  if (DD_MAJOR >= 6 && satisfies(config.version, '<12.0.0')) {
     // eslint-disable-next-line no-console
-    console.log(
-      '
-      ' and will not be supported in future versions of dd-trace.'
+    console.error(
+      'ERROR: dd-trace v6 has deleted support for Cypress<12.0.0.'
     )
+    on('task', noopTask)
+    return config
   }
 
   // The tracer was not init correctly for whatever reason (such as invalid DD_SITE)
@@ -56,15 +56,8 @@ class KafkajsConsumerPlugin extends ConsumerPlugin {
 
   commit (commitList) {
     if (!this.config.dsmEnabled) return
-    const keys = [
-
-      'type',
-      'partition',
-      'offset',
-      'topic',
-    ]
-    for (const commit of commitList.map(this.transformCommit)) {
-      if (keys.some(key => !commit.hasOwnProperty(key))) continue
+    for (const rawCommit of commitList) {
+      const commit = this.transformCommit(rawCommit)
       this.tracer.setOffset(commit)
     }
   }

@@ -64,14 +64,8 @@ class KafkajsProducerPlugin extends ProducerPlugin {
 
     if (!this.config.dsmEnabled) return
    if (!commitList || !Array.isArray(commitList)) return
-    const keys = [
-
-      'partition',
-      'offset',
-      'topic',
-    ]
-    for (const commit of commitList.map(r => this.transformProduceResponse(r, clusterId))) {
-      if (keys.some(key => !commit.hasOwnProperty(key))) continue
+    for (const rawCommit of commitList) {
+      const commit = this.transformProduceResponse(rawCommit, clusterId)
       this.tracer.setOffset(commit)
     }
   }
@@ -8,6 +8,7 @@ const web = require('../plugins/util/web')
 const { ipHeaderList } = require('../plugins/util/ip_extractor')
 const { keepTrace } = require('../priority_sampler')
 const { ASM } = require('../standalone/product')
+const { isEmpty } = require('../util')
 const { getActiveRequest } = require('./store')
 const {
   incrementWafInitMetric,

@@ -170,7 +171,9 @@ function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedRespons
   // Basic collection
   if (!shouldCollectEventHeaders) return mandatoryCollectedHeaders
 
-  const responseHeaders =
+  // Skip the spread when the stored side is empty -- common during the early
+  // request lifecycle when no upstream response headers have been captured.
+  const responseHeaders = isEmpty(storedResponseHeaders)
     ? res.getHeaders()
     : { ...storedResponseHeaders, ...res.getHeaders() }
 
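The isEmpty helper comes from this release's util.js changes (+17 lines, not shown in this diff). A minimal sketch of the pattern, with a hypothetical isEmpty that treats a missing or zero-key object as empty — the real helper may differ:

    // Sketch only: the actual isEmpty lives in packages/dd-trace/src/util.js.
    function isEmpty (obj) {
      return obj == null || Object.keys(obj).length === 0
    }

    const stored = undefined                             // nothing captured yet
    const live = { 'content-type': 'application/json' }

    // The old code always built a merged object; the new code returns the live
    // headers directly when nothing is stored, skipping a throwaway allocation.
    const headers = isEmpty(stored) ? live : { ...stored, ...live }
    console.log(headers === live) // true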
@@ -2,6 +2,7 @@
 
 const { getEnvironmentVariable } = require('../config/helper')
 const { isTrue } = require('../util')
+const { DD_MAJOR } = require('../../../../version')
 
 /**
  * Returns the current Lage package name if the Lage package name override is enabled.

@@ -9,7 +10,7 @@ const { isTrue } = require('../util')
  * @returns {string|undefined}
  */
 function getLagePackageName () {
-  if (!isTrue(getEnvironmentVariable('DD_ENABLE_LAGE_PACKAGE_NAME'))) {
+  if (DD_MAJOR < 6 && !isTrue(getEnvironmentVariable('DD_ENABLE_LAGE_PACKAGE_NAME'))) {
     return
   }
 
@@ -7,39 +7,13 @@ const zlib = require('zlib')
 const { storage } = require('../../../../datadog-core')
 const log = require('../../log')
 const { httpAgent, httpsAgent } = require('../../exporters/common/agents')
+const {
+  RATE_LIMIT_MAX_WAIT_MS,
+  isRetriableNetworkError,
+  singleJitteredDelay,
+} = require('../../exporters/common/retry')
 const { urlToHttpOptions } = require('../../exporters/common/url-to-http-options-polyfill')
 
-const RATE_LIMIT_MAX_WAIT_MS = 30_000
-const RETRY_BASE_MS = 5000
-const RETRY_JITTER_MS = 2500
-
-/**
- * Calculates retry delay with jitter to prevent thundering herd.
- * Delay is RETRY_BASE_MS + random(0, RETRY_JITTER_MS) (e.g. 5–7.5 seconds).
- *
- * @returns {number} Delay in milliseconds
- */
-function getRetryDelay () {
-  return RETRY_BASE_MS + (Math.random() * RETRY_JITTER_MS)
-}
-
-/**
- * Determines if a network error is retriable (transient failures only).
- * ECONNREFUSED is retried because it can be transient (service starting up,
- * restarts, rolling deploys, k8s pod/readiness transitions). ENOTFOUND is
- * excluded as it indicates DNS failure or wrong host and is usually not transient.
- *
- * @param {Error} err - The error to check
- * @returns {boolean}
- */
-function isRetriableNetworkError (err) {
-  if (!err.code) return false
-  return err.code === 'ECONNREFUSED' ||
-    err.code === 'ECONNRESET' ||
-    err.code === 'ETIMEDOUT' ||
-    err.code === 'EPIPE'
-}
-
 function parseUrl (urlObjOrString) {
   if (urlObjOrString !== null && typeof urlObjOrString === 'object') {
     return urlToHttpOptions(urlObjOrString)
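The retry constants and helpers move into the new packages/dd-trace/src/exporters/common/retry.js (+104 lines, not included in this diff). A rough sketch of a single jittered delay in the removed 5–7.5 s range; the singleJitteredDelay name is taken from the import above, but its exact implementation and bounds inside retry.js are assumptions:

    // Sketch only: mirrors the deleted getRetryDelay(); the real singleJitteredDelay()
    // in exporters/common/retry.js may use different bounds or backoff behaviour.
    const RETRY_BASE_MS = 5000
    const RETRY_JITTER_MS = 2500

    function singleJitteredDelay () {
      // Uniform jitter spreads retries over [5000, 7500) ms so many tracers
      // retrying at once do not hit the intake at the same instant.
      return RETRY_BASE_MS + Math.random() * RETRY_JITTER_MS
    }

    setTimeout(() => console.log('retrying request'), singleJitteredDelay())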
@@ -63,6 +37,10 @@ function parseUrl (urlObjOrString) {
  * Destroys connections on errors to prevent reuse of bad connections. Preserves
  * original status code across retries for telemetry.
  *
+ * Retry timers stay ref'd. Test-runner plugins block the suite via
+ * `delay: true` channels until this callback fires; an unref'd retry would
+ * let the host exit first and the suite would never run.
+ *
  * @param {string} data - Request body (e.g. JSON string)
  * @param {object} options - { url, path?, method?, headers?, timeout? } (may be mutated)
  * @param {Function} callback - (err, res, statusCode) => void

@@ -157,7 +135,7 @@ function request (data, options, callback) {
         // ignore
       }
       hasRetried = true
-      setTimeout(makeRequest, getRetryDelay())
+      setTimeout(makeRequest, singleJitteredDelay())
       return
     }
 

@@ -177,7 +155,7 @@ function request (data, options, callback) {
       // Retry on retriable network errors
       if (!hasRetried && isRetriableNetworkError(err)) {
        hasRetried = true
-        setTimeout(makeRequest, getRetryDelay())
+        setTimeout(makeRequest, singleJitteredDelay())
        return
      }
 
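The doc comment added above hinges on Node's ref'd/unref'd timer semantics: an unref'd timer does not keep the event loop alive, so a process with nothing else pending can exit before the retry ever fires. A small standalone illustration:

    // With unref(), Node may exit immediately and the callback never runs;
    // without it, the process stays alive until the timer fires.
    const timer = setTimeout(() => console.log('retry fired'), 5000)
    timer.unref() // comment this line out to keep the process (and the retry) alive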
@@ -8,7 +8,6 @@ export interface ConfigProperties extends GeneratedConfig {
     responsesEnabled: boolean
     rules: PayloadTaggingRules
   }
-  commitSHA: string | undefined
   debug: boolean
   instrumentationSource: 'manual' | 'ssi'
   isCiVisibility: boolean

@@ -18,7 +17,6 @@ export interface ConfigProperties extends GeneratedConfig {
   lookup: NonNullable<import('../../../../index').TracerOptions['lookup']>
   readonly parsedDdTags: Record<string, string>
   plugins: boolean
-  repositoryUrl: string | undefined
   sampler: {
     rateLimit: number
     rules: import('../../../../index').SamplingRule[]

@@ -3,7 +3,6 @@
 const fs = require('node:fs')
 const os = require('node:os')
 const { URL } = require('node:url')
-const path = require('node:path')
 
 const rfdc = require('../../../../vendor/dist/rfdc')({ proto: false, circles: false })
 const uuid = require('../../../../vendor/dist/crypto-randomuuid') // we need to keep the old uuid dep because of cypress

@@ -12,7 +11,6 @@ const { DD_MAJOR } = require('../../../../version')
 const log = require('../log')
 const pkg = require('../pkg')
 const { isTrue } = require('../util')
-const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags')
 const telemetry = require('../telemetry')
 const telemetryMetrics = require('../telemetry/metrics')
 const {

@@ -22,8 +20,6 @@ const {
 } = require('../serverless')
 const { ORIGIN_KEY, DATADOG_MINI_AGENT_PATH } = require('../constants')
 const { appendRules } = require('../payload-tagging/config')
-const { getGitMetadataFromGitProperties, removeUserSensitiveInfo, getRemoteOriginURL, resolveGitHeadSHA } =
-  require('./git_properties')
 const ConfigBase = require('./config-base')
 const {
   getEnvironmentVariable,

@@ -215,10 +211,6 @@ class Config extends ConfigBase {
 
     warnWrongOtelSettings()
 
-    if (this.DD_TRACE_GIT_METADATA_ENABLED) {
-      this.#loadGitMetadata()
-    }
-
     parseErrors.clear()
   }
 

@@ -513,7 +505,7 @@ class Config extends ConfigBase {
     } else {
       const NX_TASK_TARGET_PROJECT = getEnvironmentVariable('NX_TASK_TARGET_PROJECT')
       if (NX_TASK_TARGET_PROJECT) {
-        if (this.DD_ENABLE_NX_SERVICE_NAME) {
+        if (DD_MAJOR >= 6 || this.DD_ENABLE_NX_SERVICE_NAME) {
           setAndTrack(this, 'service', normalizeService(NX_TASK_TARGET_PROJECT) || 'node')
           isServiceNameInferred = true
         } else if (DD_MAJOR < 6) {

@@ -628,52 +620,6 @@ class Config extends ConfigBase {
 
     telemetry.updateConfig([...configWithOrigin.values()], this)
   }
-
-  // TODO: Move outside of config. This is unrelated to the config system.
-  #loadGitMetadata () {
-    // Try to read Git metadata from the environment variables
-    this.repositoryUrl = removeUserSensitiveInfo(this.DD_GIT_REPOSITORY_URL ?? this.tags[GIT_REPOSITORY_URL])
-    this.commitSHA = this.DD_GIT_COMMIT_SHA ?? this.tags[GIT_COMMIT_SHA]
-
-    // Otherwise, try to read Git metadata from the git.properties file
-    if (!this.repositoryUrl || !this.commitSHA) {
-      const DD_GIT_PROPERTIES_FILE = this.DD_GIT_PROPERTIES_FILE
-      const gitPropertiesFile = DD_GIT_PROPERTIES_FILE ?? `${process.cwd()}/git.properties`
-      try {
-        const gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8')
-        const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString)
-        this.commitSHA ??= commitSHA
-        this.repositoryUrl ??= repositoryUrl
-      } catch (error) {
-        // Only log error if the user has set a git.properties path
-        if (DD_GIT_PROPERTIES_FILE) {
-          log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, error)
-        }
-      }
-    }
-
-    // Otherwise, try to read Git metadata from the .git/ folder
-    const DD_GIT_FOLDER_PATH = this.DD_GIT_FOLDER_PATH
-    const gitFolderPath = DD_GIT_FOLDER_PATH ?? path.join(process.cwd(), '.git')
-
-    if (!this.repositoryUrl) {
-      // Try to read git config (repository URL)
-      const gitConfigPath = path.join(gitFolderPath, 'config')
-      try {
-        const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8')
-        if (gitConfigContent) {
-          this.repositoryUrl = getRemoteOriginURL(gitConfigContent)
-        }
-      } catch (error) {
-        // Only log error if the user has set a .git/ path
-        if (DD_GIT_FOLDER_PATH) {
-          log.error('Error reading git config: %s', gitConfigPath, error)
-        }
-      }
-    }
-    // Try to read git HEAD (commit SHA)
-    this.commitSHA ??= resolveGitHeadSHA(gitFolderPath)
-  }
 }
 
 /**
@@ -19,10 +19,9 @@ class DataStreamsCheckpointer {
     if (!this.config.dsmEnabled) return
 
     const ctx = this.dsmProcessor.setCheckpoint(
-      ['type:' + type, 'topic:' + target, '
+      ['direction:out', 'type:' + type, 'topic:' + target, 'manual_checkpoint:true'],
       null,
-      DataStreamsContext.getDataStreamsContext(),
-      null
+      DataStreamsContext.getDataStreamsContext()
     )
     DataStreamsContext.setDataStreamsContext(ctx)
 

@@ -45,17 +44,12 @@ class DataStreamsCheckpointer {
     const parentCtx = this.tracer.extract('text_map_dsm', carrier)
     DataStreamsContext.setDataStreamsContext(parentCtx)
 
-    const tags = ['type:' + type, 'topic:' + source
+    const tags = ['direction:in', 'type:' + type, 'topic:' + source]
     if (manualCheckpoint) {
       tags.push('manual_checkpoint:true')
     }
 
-    const ctx = this.dsmProcessor.setCheckpoint(
-      tags,
-      null,
-      parentCtx,
-      null
-    )
+    const ctx = this.dsmProcessor.setCheckpoint(tags, null, parentCtx)
     DataStreamsContext.setDataStreamsContext(ctx)
 
     return ctx
@@ -1,5 +1,7 @@
 'use strict'
 
+const maxVarLen64 = 9
+
 /**
  * Encodes positive and negative numbers, using zig zag encoding to reduce the size of the variable length encoding.
  * Uses high and low part to ensure those parts are under the limit for byte operations in javascript (32 bits)

@@ -8,15 +10,45 @@
  * @returns {Uint8Array|undefined}
  */
 function encodeVarint (v) {
-  const
+  const result = new Uint8Array(maxVarLen64)
+  const written = encodeVarintInto(result, 0, v)
+  if (written === 0) {
+    return
+  }
+  return result.slice(0, written)
+}
+
+/**
+ * Writes a zig-zag varint at `target[offset..]` and returns the offset just past the last
+ * byte written. Returns `offset` unchanged when the value exceeds MAX_SAFE_INTEGER/2, mirroring
+ * the `encodeVarint` overflow contract. Used on the DSM checkpoint hot path to avoid
+ * per-call Uint8Array / Buffer allocations.
+ * @param {Uint8Array | Buffer} target
+ * @param {number} offset
+ * @param {number} value
+ * @returns {number}
+ */
+function encodeVarintInto (target, offset, value) {
+  const sign = value >= 0 ? 0 : 1
   // We leave the least significant bit for the sign.
-  const double = Math.abs(
+  const double = Math.abs(value) * 2
   if (double > Number.MAX_SAFE_INTEGER) {
-    return
+    return offset
   }
-
-
-
+  let high = Math.floor(double / 0x1_00_00_00_00)
+  let low = (double & 0xFF_FF_FF_FF) | sign
+  let i = offset
+  const limit = offset + maxVarLen64 - 1
+  // if first byte is 1, the number is negative in javascript, but we want to interpret it as positive
+  while ((high !== 0 || low < 0 || low > 0x80) && i < limit) {
+    target[i] = (low & 0x7F) | 0x80
+    low >>>= 7
+    low |= (high & 0x7F) << 25
+    high >>>= 7
+    i++
+  }
+  target[i] = low & 0x7F
+  return i + 1
 }
 
 /**
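Zig-zag varints fold a signed value into 2·|v| with the sign in the least significant bit, then emit seven payload bits per byte with the top bit as a continuation flag, so small magnitudes of either sign stay short. A small round-trip sketch against the functions above (the relative require path assumes the snippet sits next to this module; decodeVarint's [value, remainingBytes] return shape appears as context in the next hunk):

    const { encodeVarint, encodeVarintInto, decodeVarint } = require('./encoding')

    // Allocation-free path: append two varints to one preallocated buffer.
    const buf = Buffer.alloc(20)
    let offset = encodeVarintInto(buf, 0, 300) // folded to 600 -> bytes 0xD8 0x04
    offset = encodeVarintInto(buf, offset, -5) // folded to 11  -> single byte 0x0B

    // Convenience path: returns a fresh Uint8Array, or undefined on overflow.
    console.log(encodeVarint(300)) // -> Uint8Array(2) [ 216, 4 ]

    // decodeVarint returns [value, remaining bytes].
    const [first, rest] = decodeVarint(buf.subarray(0, offset))
    console.log(first, decodeVarint(rest)[0]) // 300 -5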
@@ -35,28 +67,6 @@ function decodeVarint (b) {
   return [positive ? abs : -abs, bytes]
 }
 
-const maxVarLen64 = 9
-
-/**
- * @param {number} low
- * @param {number} high
- * @returns {Uint8Array}
- */
-function encodeUvarint64 (low, high) {
-  const result = new Uint8Array(maxVarLen64)
-  let i = 0
-  // if first byte is 1, the number is negative in javascript, but we want to interpret it as positive
-  while ((high !== 0 || low < 0 || low > 0x80) && i < maxVarLen64 - 1) {
-    result[i] = (low & 0x7F) | 0x80
-    low >>>= 7
-    low |= (high & 0x7F) << 25
-    high >>>= 7
-    i++
-  }
-  result[i] = low & 0x7F
-  return result.slice(0, i + 1)
-}
-
 /**
  * @param {Uint8Array} bytes
  * @returns {[number|undefined, number|undefined, Uint8Array]}

@@ -95,5 +105,6 @@ decodeUvarint64 (
 
 module.exports = {
   encodeVarint,
+  encodeVarintInto,
   decodeVarint,
 }
@@ -7,18 +7,24 @@ const crypto = require('crypto')
 const { LRUCache } = require('../../../../vendor/dist/lru-cache')
 const log = require('../log')
 const pick = require('../../../datadog-core/src/utils/src/pick')
-const { encodeVarint, decodeVarint } = require('./encoding')
+const { encodeVarintInto, decodeVarint } = require('./encoding')
 
 const cache = new LRUCache({ max: 500 })
 
 const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx'
 const CONTEXT_PROPAGATION_KEY_BASE64 = 'dd-pathway-ctx-base64'
 
+const PATHWAY_CONTEXT_BYTES = 20
+
+// Reused across `encodePathwayContext` calls; the buffer is fully rewritten before each
+// `Buffer.from(...)` copy-out so callers never observe mutation between checkpoints.
+const pathwayScratch = Buffer.allocUnsafe(PATHWAY_CONTEXT_BYTES)
+
 const logKeys = [CONTEXT_PROPAGATION_KEY, CONTEXT_PROPAGATION_KEY_BASE64]
 
 function shaHash (checkpointString) {
-
-  return Buffer.from(
+  // Copy out of the 32-byte digest so the LRU cache doesn't retain it.
+  return Buffer.from(crypto.createHash('sha256').update(checkpointString).digest().subarray(0, 8))
 }
 
 /**

@@ -30,30 +36,25 @@ function shaHash (checkpointString) {
  */
 function computeHash (service, env, edgeTags, parentHash, propagationHashBigInt = null) {
   edgeTags.sort()
-  const hashableEdgeTags = edgeTags.
-
-
-
-  //
-
-
+  const hashableEdgeTags = edgeTags.includes('manual_checkpoint:true')
+    ? edgeTags.filter(item => item !== 'manual_checkpoint:true')
+    : edgeTags
+
+  // The cache key includes parentHash so a fan-in node with different parents
+  // gets distinct cache entries; the hash input below excludes parentHash and
+  // gets combined with it via a second sha pass to produce the final hash.
+  const joinedEdgeTags = hashableEdgeTags.join('')
+  const propagationHex = propagationHashBigInt ? propagationHashBigInt.toString(16) : ''
+  const propagationPart = propagationHex ? `:${propagationHex}` : ''
+  const key = `${service}${env}${joinedEdgeTags}${parentHash}${propagationPart}`
 
   let value = cache.get(key)
   if (value) {
     return value
   }
 
-
-
-  // with the same node but different parents (e.g., multiple queues feeding one consumer)
-  // - 'hashInput' (below) excludes parentHash to compute only the current node's identity hash,
-  // which is then XORed with parentHash (line 54) to build the complete pathway hash
-  // This two-step approach (hash current node independently, then combine with parent) is
-  // required for proper pathway construction in the DSM protocol.
-  const baseString = `${service}${env}` + hashableEdgeTags.join('')
-  const hashInput = propagationHashBigInt
-    ? `${baseString}:${propagationHashBigInt.toString(16)}`
-    : baseString
+  const baseString = `${service}${env}${joinedEdgeTags}`
+  const hashInput = propagationHex ? `${baseString}:${propagationHex}` : baseString
 
   const currentHash = shaHash(hashInput)
   const buf = Buffer.concat([currentHash, parentHash], 16)
@@ -70,11 +71,12 @@ function computeHash (service, env, edgeTags, parentHash, propagationHashBigInt
  * @returns {Buffer}
  */
 function encodePathwayContext (dataStreamsContext) {
-
-
-
-
-
+  let offset = dataStreamsContext.hash.copy(pathwayScratch, 0)
+  offset = encodeVarintInto(pathwayScratch, offset, Math.round(dataStreamsContext.pathwayStartNs / 1e6))
+  offset = encodeVarintInto(pathwayScratch, offset, Math.round(dataStreamsContext.edgeStartNs / 1e6))
+  // No-op when offset >= PATHWAY_CONTEXT_BYTES; otherwise pads stale bytes from a previous call.
+  pathwayScratch.fill(0, offset, PATHWAY_CONTEXT_BYTES)
+  return Buffer.from(pathwayScratch.subarray(0, PATHWAY_CONTEXT_BYTES))
 }
 
 /**
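The refactored encodePathwayContext keeps the wire layout: an 8-byte pathway hash followed by two zig-zag varints holding the pathway start and edge start, converted from ns to ms. With present-day epoch-millisecond values each varint takes 6 bytes, so 8 + 6 + 6 fills the 20-byte scratch buffer exactly; smaller timestamps are simply zero-padded. A small usage sketch (field names match the dataStreamsContext fields used above; the relative require path is illustrative):

    const { encodePathwayContext } = require('./pathway')

    const nowNs = Date.now() * 1e6
    const encoded = encodePathwayContext({
      hash: Buffer.from('0102030405060708', 'hex'), // 8-byte pathway hash
      pathwayStartNs: nowNs,
      edgeStartNs: nowNs,
    })

    console.log(encoded.length)         // 20
    console.log(encoded.subarray(0, 8)) // the hash bytes, copied verbatim
    // decodePathwayContext (same file) reverses this at millisecond precision.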
@@ -178,6 +180,7 @@ const DsmPathwayCodec = {
 }
 
 module.exports = {
+  CONTEXT_PROPAGATION_KEY_BASE64,
   computePathwayHash: computeHash,
   encodePathwayContext,
   decodePathwayContext,
@@ -8,15 +8,21 @@ const { PATHWAY_HASH, DSM_TRANSACTION_ID, DSM_TRANSACTION_CHECKPOINT } = require(
 const log = require('../log')
 const processTags = require('../process-tags')
 const propagationHash = require('../propagation-hash')
-const {
+const { CONTEXT_PROPAGATION_KEY_BASE64, computePathwayHash } = require('./pathway')
 const { DataStreamsWriter } = require('./writer')
-const { computePathwayHash } = require('./pathway')
 const { getAmqpMessageSize, getHeadersSize, getMessageSize, getSizeOrZero } = require('./size')
 const { SchemaBuilder } = require('./schemas/schema_builder')
 const { SchemaSampler } = require('./schemas/schema_sampler')
 
 const ENTRY_PARENT_HASH = Buffer.from('0000000000000000', 'hex')
 
+// A direction:out checkpoint estimates the size cost of the header the
+// producer plugin will inject. The pathway context is always 20 binary
+// bytes, encoded as 28 base64 chars; together with the header key and
+// JSON framing (matching the prior `JSON.stringify({key: value})` byte
+// count minus 1), this is a fixed value.
+const PATHWAY_HEADER_BYTES = CONTEXT_PROPAGATION_KEY_BASE64.length + 28 + 6
+
 class StatsPoint {
   constructor (hash, parentHash, edgeTags) {
     this.hash = hash.readBigUInt64LE()
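The fixed estimate can be checked by hand: the header key 'dd-pathway-ctx-base64' is 21 characters, 20 binary bytes base64-encode to 28 characters, and the JSON framing around a single key/value pair costs 6 more bytes once the old "- 1" adjustment is applied. A quick check of that arithmetic:

    const key = 'dd-pathway-ctx-base64'                // 21 chars
    const value = Buffer.alloc(20).toString('base64')  // any 20 bytes -> 28 base64 chars

    console.log(value.length)        // 28
    console.log(key.length + 28 + 6) // 55
    // Same figure the old code derived per checkpoint:
    console.log(JSON.stringify({ [key]: value }).length - 1) // 55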
@@ -271,19 +277,19 @@ class DataStreamsProcessor {
 
   recordCheckpoint (checkpoint, span = null) {
     if (!this.enabled) return
-    this.bucketFromTimestamp(checkpoint.currentTimestamp)
-      .forCheckpoint(checkpoint)
-      .addLatencies(checkpoint)
-    // set DSM pathway hash on span to enable related traces feature on DSM tab, convert from buffer to uint64
+    const statsPoint = this.bucketFromTimestamp(checkpoint.currentTimestamp).forCheckpoint(checkpoint)
+    statsPoint.addLatencies(checkpoint)
     if (span) {
-
+      // StatsPoint already converted the 8-byte Buffer hash to a uint64 BigInt.
+      span.setTag(PATHWAY_HASH, statsPoint.hash.toString())
     }
   }
 
-  setCheckpoint (edgeTags, span, ctx
-    if (!this.enabled) return
+  setCheckpoint (edgeTags, span, ctx, payloadSize = 0) {
+    if (!this.enabled) return
     const nowNs = Date.now() * 1e6
-
+    // Callers must place the direction tag at index 0.
+    const direction = edgeTags[0]
     let pathwayStartNs = nowNs
     let edgeStartNs = nowNs
     let parentHash = ENTRY_PARENT_HASH

@@ -334,11 +340,7 @@ class DataStreamsProcessor {
       closestOppositeDirectionEdgeStart,
     }
     if (direction === 'direction:out') {
-
-      // - 1 to account for extra byte for {
-      const ddInfoContinued = {}
-      DsmPathwayCodec.encode(dataStreamsContext, ddInfoContinued)
-      payloadSize += getSizeOrZero(JSON.stringify(ddInfoContinued)) - 1
+      payloadSize += PATHWAY_HEADER_BYTES
     }
     const checkpoint = {
       currentTimestamp: nowNs,
@@ -4,7 +4,7 @@ const { types } = require('util')
 
 function getSizeOrZero (obj) {
   if (typeof obj === 'string') {
-    return Buffer.
+    return Buffer.byteLength(obj, 'utf8')
   }
   if (types.isArrayBuffer(obj)) {
     return obj.byteLength

@@ -32,7 +32,11 @@ function getSizeOrZero (obj) {
 
 function getHeadersSize (headers) {
   if (headers === undefined) return 0
-
+  let size = 0
+  for (const key of Object.keys(headers)) {
+    size += Buffer.byteLength(key, 'utf8') + getSizeOrZero(headers[key])
+  }
+  return size
 }
 
 function getMessageSize (message) {
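With the explicit loop, the estimate is just the sum of UTF-8 key lengths plus each value's size as computed by getSizeOrZero, with no intermediate JSON string. A small usage sketch (relative require path assumed):

    const { getHeadersSize } = require('./size')

    // 'x-id'(4) + 'abc'(3) + 'accept'(6) + 'text/plain'(10) = 23 bytes
    console.log(getHeadersSize({ 'x-id': 'abc', accept: 'text/plain' })) // 23
    console.log(getHeadersSize(undefined))                               // 0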
@@ -1,8 +1,11 @@
 'use strict'
 
+const getGitMetadata = require('../git_metadata')
+
 module.exports = function getDebuggerConfig (config, inputPath) {
+  const { commitSHA, repositoryUrl } = getGitMetadata(config)
   return {
-    commitSHA: config.commitSHA,
+    commitSHA,
     debug: config.debug,
     dynamicInstrumentation: config.dynamicInstrumentation,
     env: config.env,

@@ -10,7 +13,7 @@ module.exports = function getDebuggerConfig (config, inputPath) {
     logLevel: config.logLevel,
     port: config.port,
     propagateProcessTags: { enabled: config.DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED },
-    repositoryUrl: config.repositoryUrl,
+    repositoryUrl,
     runtimeId: config.tags['runtime-id'],
     service: config.service,
     url: config.url?.toString(),
@@ -247,10 +247,6 @@ session.on('Debugger.paused', async ({ params }) => {
       language: 'javascript',
     }
 
-    if (config.propagateProcessTags.enabled) {
-      snapshot[processTags.DYNAMIC_INSTRUMENTATION_FIELD_NAME] = processTags.tagsObject
-    }
-
     if (probe.captureSnapshot) {
       if (fatalSnapshotErrors && fatalSnapshotErrors.length > 0) {
         // There was an error collecting the snapshot for this probe, let's not try again

@@ -327,7 +323,8 @@ session.on('Debugger.paused', async ({ params }) => {
 
     ackEmitting(probe)
 
-    send(message, logger, dd, snapshot)
+    send(message, logger, dd, snapshot,
+      config.propagateProcessTags.enabled ? processTags.serialized : undefined)
   }
 })
 
@@ -40,7 +40,7 @@ const jsonBuffer = new JSONBuffer({
   onFlush,
 })
 
-function send (message, logger, dd, snapshot) {
+function send (message, logger, dd, snapshot, processTags) {
   const payload = {
     ddsource,
     hostname,

@@ -50,6 +50,7 @@ function send (message, logger, dd, snapshot) {
       : message,
     logger,
     dd,
+    process_tags: processTags,
     debugger: { snapshot },
   }
 