dd-trace 3.25.0 → 3.26.0

This diff shows the changes between the publicly released contents of the two package versions, as published to their respective registries. It is provided for informational purposes only.
Files changed (41)
  1. package/LICENSE-3rdparty.csv +4 -3
  2. package/package.json +4 -4
  3. package/packages/datadog-instrumentations/src/aws-sdk.js +5 -0
  4. package/packages/datadog-instrumentations/src/cassandra-driver.js +6 -3
  5. package/packages/datadog-instrumentations/src/elasticsearch.js +39 -1
  6. package/packages/datadog-instrumentations/src/helpers/hooks.js +1 -0
  7. package/packages/datadog-instrumentations/src/kafkajs.js +2 -2
  8. package/packages/datadog-instrumentations/src/opensearch.js +2 -1
  9. package/packages/datadog-plugin-aws-sdk/src/base.js +3 -3
  10. package/packages/datadog-plugin-aws-sdk/src/services/dynamodb.js +1 -0
  11. package/packages/datadog-plugin-aws-sdk/src/services/kinesis.js +1 -0
  12. package/packages/datadog-plugin-aws-sdk/src/services/s3.js +1 -0
  13. package/packages/datadog-plugin-aws-sdk/src/services/sns.js +1 -0
  14. package/packages/datadog-plugin-aws-sdk/src/services/sqs.js +1 -0
  15. package/packages/datadog-plugin-cassandra-driver/src/index.js +4 -4
  16. package/packages/datadog-plugin-grpc/src/client.js +8 -2
  17. package/packages/datadog-plugin-grpc/src/server.js +2 -2
  18. package/packages/datadog-plugin-kafkajs/src/consumer.js +6 -1
  19. package/packages/datadog-plugin-kafkajs/src/producer.js +8 -1
  20. package/packages/datadog-plugin-mongodb-core/src/index.js +13 -2
  21. package/packages/datadog-plugin-openai/src/index.js +9 -2
  22. package/packages/datadog-plugin-oracledb/src/index.js +1 -0
  23. package/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +6 -5
  24. package/packages/dd-trace/src/config.js +11 -0
  25. package/packages/dd-trace/src/data_streams_context.js +15 -0
  26. package/packages/dd-trace/src/datastreams/pathway.js +58 -0
  27. package/packages/dd-trace/src/datastreams/processor.js +194 -0
  28. package/packages/dd-trace/src/datastreams/writer.js +66 -0
  29. package/packages/dd-trace/src/plugin_manager.js +6 -1
  30. package/packages/dd-trace/src/plugins/database.js +2 -1
  31. package/packages/dd-trace/src/plugins/index.js +1 -0
  32. package/packages/dd-trace/src/plugins/outbound.js +2 -1
  33. package/packages/dd-trace/src/plugins/tracing.js +3 -0
  34. package/packages/dd-trace/src/plugins/util/git.js +37 -5
  35. package/packages/dd-trace/src/plugins/util/user-provided-git.js +36 -2
  36. package/packages/dd-trace/src/profiling/config.js +32 -5
  37. package/packages/dd-trace/src/service-naming/index.js +13 -1
  38. package/packages/dd-trace/src/service-naming/schemas/v0/web.js +9 -0
  39. package/packages/dd-trace/src/service-naming/schemas/v1/web.js +8 -0
  40. package/packages/dd-trace/src/telemetry/metrics.js +76 -20
  41. package/packages/dd-trace/src/tracer.js +19 -1
package/packages/dd-trace/src/datastreams/pathway.js
@@ -0,0 +1,58 @@
+ // hashing used here is md5, truncated to 64 bits (despite the shaHash name)
+ // other languages use FNV-1
+ // this inconsistency is ok because hashes do not need to be consistent across services
+ const crypto = require('crypto')
+ const { encodeVarint, decodeVarint } = require('./encoding')
+ const LRUCache = require('lru-cache')
+
+ const options = { max: 500 }
+ const cache = new LRUCache(options)
+
+ function shaHash (checkpointString) {
+   const hash = crypto.createHash('md5').update(checkpointString).digest('hex').slice(0, 16)
+   return Buffer.from(hash, 'hex')
+ }
+
+ function computeHash (service, env, edgeTags, parentHash) {
+   const key = `${service}${env}` + edgeTags.join('') + parentHash.toString()
+   if (cache.get(key)) {
+     return cache.get(key)
+   }
+   const currentHash = shaHash(`${service}${env}` + edgeTags.join(''))
+   const buf = Buffer.concat([ currentHash, parentHash ], 16)
+   const val = shaHash(buf.toString())
+   cache.set(key, val)
+   return val
+ }
+
+ function encodePathwayContext (dataStreamsContext) {
+   return Buffer.concat([
+     dataStreamsContext.hash,
+     Buffer.from(encodeVarint(Math.round(dataStreamsContext.pathwayStartNs / 1e6))),
+     Buffer.from(encodeVarint(Math.round(dataStreamsContext.edgeStartNs / 1e6)))
+   ], 20)
+ }
+
+ function decodePathwayContext (pathwayContext) {
+   if (pathwayContext == null || pathwayContext.length < 8) {
+     return null
+   }
+   // hash and parent hash are in LE
+   const pathwayHash = pathwayContext.subarray(0, 8)
+   const encodedTimestamps = pathwayContext.subarray(8)
+   const [pathwayStartMs, encodedTimeSincePrev] = decodeVarint(encodedTimestamps)
+   if (pathwayStartMs === undefined) {
+     return null
+   }
+   const [edgeStartMs] = decodeVarint(encodedTimeSincePrev)
+   if (edgeStartMs === undefined) {
+     return null
+   }
+   return { hash: pathwayHash, pathwayStartNs: pathwayStartMs * 1e6, edgeStartNs: edgeStartMs * 1e6 }
+ }
+
+ module.exports = {
+   computePathwayHash: computeHash,
+   encodePathwayContext,
+   decodePathwayContext
+ }
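
The encoded context is a 20-byte buffer: the 8-byte pathway hash followed by two varint-encoded millisecond timestamps. A minimal round-trip sketch (the values and require path are illustrative, not from the package):

  const { computePathwayHash, encodePathwayContext, decodePathwayContext } = require('./datastreams/pathway')

  // an entry checkpoint uses the all-zero parent hash
  const hash = computePathwayHash('my-service', 'prod', ['direction:out', 'topic:orders', 'type:kafka'], Buffer.alloc(8))
  const nowNs = Date.now() * 1e6
  const encoded = encodePathwayContext({ hash, pathwayStartNs: nowNs, edgeStartNs: nowNs })
  const decoded = decodePathwayContext(encoded)
  // decoded.hash equals hash; the timestamps survive at millisecond precision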
package/packages/dd-trace/src/datastreams/processor.js
@@ -0,0 +1,194 @@
+ const os = require('os')
+ const pkg = require('../../../../package.json')
+ // Message pack int encoding is done in big endian, but data streams uses little endian
+ const Uint64 = require('int64-buffer').Uint64BE
+
+ const { LogCollapsingLowestDenseDDSketch } = require('@datadog/sketches-js')
+
+ const { DataStreamsWriter } = require('./writer')
+ const { computePathwayHash } = require('./pathway')
+ const ENTRY_PARENT_HASH = Buffer.from('0000000000000000', 'hex')
+
+ const HIGH_ACCURACY_DISTRIBUTION = 0.0075
+
+ class StatsPoint {
+   constructor (hash, parentHash, edgeTags) {
+     this.hash = new Uint64(hash)
+     this.parentHash = new Uint64(parentHash)
+     this.edgeTags = edgeTags
+     this.edgeLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION)
+     this.pathwayLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION)
+   }
+
+   addLatencies (checkpoint) {
+     const edgeLatencySec = checkpoint.edgeLatencyNs / 1e9
+     const pathwayLatencySec = checkpoint.pathwayLatencyNs / 1e9
+     this.edgeLatency.accept(edgeLatencySec)
+     this.pathwayLatency.accept(pathwayLatencySec)
+   }
+
+   encode () {
+     return {
+       Hash: this.hash,
+       ParentHash: this.parentHash,
+       EdgeTags: this.edgeTags,
+       EdgeLatency: this.edgeLatency.toProto(),
+       PathwayLatency: this.pathwayLatency.toProto()
+     }
+   }
+ }
+
+ class StatsBucket extends Map {
+   forCheckpoint (checkpoint) {
+     const key = checkpoint.hash
+     if (!this.has(key)) {
+       this.set(key, new StatsPoint(checkpoint.hash, checkpoint.parentHash, checkpoint.edgeTags)) // StatsPoint
+     }
+
+     return this.get(key)
+   }
+ }
+
+ class TimeBuckets extends Map {
+   forTime (time) {
+     if (!this.has(time)) {
+       this.set(time, new StatsBucket())
+     }
+
+     return this.get(time)
+   }
+ }
+
+ class DataStreamsProcessor {
+   constructor ({
+     dsmEnabled,
+     hostname,
+     port,
+     url,
+     env,
+     tags
+   } = {}) {
+     this.writer = new DataStreamsWriter({
+       hostname,
+       port,
+       url
+     })
+     this.bucketSizeNs = 1e10
+     this.buckets = new TimeBuckets()
+     this.hostname = os.hostname()
+     this.enabled = dsmEnabled
+     this.env = env
+     this.tags = tags || {}
+     this.service = this.tags.service || 'unnamed-nodejs-service'
+     this.sequence = 0
+
+     if (this.enabled) {
+       this.timer = setInterval(this.onInterval.bind(this), 10000)
+       this.timer.unref()
+     }
+   }
+
+   onInterval () {
+     const serialized = this._serializeBuckets()
+     if (!serialized) return
+     const payload = {
+       Env: this.env,
+       Service: this.service,
+       Stats: serialized,
+       TracerVersion: pkg.version,
+       Lang: 'javascript'
+     }
+     this.writer.flush(payload)
+   }
+
+   recordCheckpoint (checkpoint) {
+     if (!this.enabled) return
+     const bucketTime = Math.round(checkpoint.currentTimestamp - (checkpoint.currentTimestamp % this.bucketSizeNs))
+     this.buckets.forTime(bucketTime)
+       .forCheckpoint(checkpoint)
+       .addLatencies(checkpoint)
+   }
+
+   setCheckpoint (edgeTags, ctx = null) {
+     if (!this.enabled) return null
+     const nowNs = Date.now() * 1e6
+     const direction = edgeTags.find(t => t.startsWith('direction:'))
+     let pathwayStartNs = nowNs
+     let edgeStartNs = nowNs
+     let parentHash = ENTRY_PARENT_HASH
+     let closestOppositeDirectionHash = ENTRY_PARENT_HASH
+     let closestOppositeDirectionEdgeStart = nowNs
+     if (ctx != null) {
+       pathwayStartNs = ctx.pathwayStartNs
+       edgeStartNs = ctx.edgeStartNs
+       parentHash = ctx.hash
+       closestOppositeDirectionHash = ctx.closestOppositeDirectionHash || ENTRY_PARENT_HASH
+       closestOppositeDirectionEdgeStart = ctx.closestOppositeDirectionEdgeStart || nowNs
+       if (direction === ctx.previousDirection) {
+         parentHash = ctx.closestOppositeDirectionHash
+         if (parentHash === ENTRY_PARENT_HASH) {
+           // if the closest hash from the opposite direction is the entry hash, that means
+           // we are producing in a loop, without consuming
+           // in that case, we don't want the pathway to grow forever; we restart a new pathway instead
+           edgeStartNs = nowNs
+           pathwayStartNs = nowNs
+         } else {
+           edgeStartNs = ctx.closestOppositeDirectionEdgeStart
+         }
+       } else {
+         closestOppositeDirectionHash = parentHash
+         closestOppositeDirectionEdgeStart = edgeStartNs
+       }
+     }
+     const hash = computePathwayHash(this.service, this.env, edgeTags, parentHash)
+     const edgeLatencyNs = nowNs - edgeStartNs
+     const pathwayLatencyNs = nowNs - pathwayStartNs
+     const checkpoint = {
+       currentTimestamp: nowNs,
+       parentHash: parentHash,
+       hash: hash,
+       edgeTags: edgeTags,
+       edgeLatencyNs: edgeLatencyNs,
+       pathwayLatencyNs: pathwayLatencyNs
+     }
+     this.recordCheckpoint(checkpoint)
+     return {
+       hash: hash,
+       edgeStartNs: edgeStartNs,
+       pathwayStartNs: pathwayStartNs,
+       previousDirection: direction,
+       closestOppositeDirectionHash: closestOppositeDirectionHash,
+       closestOppositeDirectionEdgeStart: closestOppositeDirectionEdgeStart
+     }
+   }
+
+   _serializeBuckets () {
+     const serializedBuckets = []
+
+     for (const [ timeNs, bucket ] of this.buckets.entries()) {
+       const points = []
+
+       for (const stats of bucket.values()) {
+         points.push(stats.encode())
+       }
+
+       serializedBuckets.push({
+         Start: new Uint64(timeNs),
+         Duration: new Uint64(this.bucketSizeNs),
+         Stats: points
+       })
+     }
+
+     this.buckets.clear()
+
+     return serializedBuckets
+   }
+ }
+
+ module.exports = {
+   DataStreamsProcessor: DataStreamsProcessor,
+   StatsPoint: StatsPoint,
+   StatsBucket: StatsBucket,
+   TimeBuckets,
+   ENTRY_PARENT_HASH
+ }
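
How the returned context threads through successive checkpoints is easiest to see in a hypothetical produce-then-consume sequence (the tag values are illustrative):

  const { DataStreamsProcessor } = require('./datastreams/processor')
  const processor = new DataStreamsProcessor({ dsmEnabled: true, env: 'prod', tags: { service: 'my-service' } })

  // producer: no upstream context, so a new pathway starts (parent is the entry hash)
  const produceCtx = processor.setCheckpoint(['direction:out', 'topic:orders', 'type:kafka'], null)

  // consumer: the propagated context becomes the parent, extending the pathway
  const consumeCtx = processor.setCheckpoint(['direction:in', 'topic:orders', 'type:kafka'], produceCtx)
  // every 10 s the interval timer flushes the latency sketches, aggregated into 10 s (1e10 ns) buckets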
package/packages/dd-trace/src/datastreams/writer.js
@@ -0,0 +1,66 @@
+ const pkg = require('../../../../package.json')
+ const log = require('../log')
+ const request = require('../exporters/common/request')
+ const { URL, format } = require('url')
+ const msgpack = require('msgpack-lite')
+ const zlib = require('zlib')
+ const codec = msgpack.createCodec({ int64: true })
+
+ function makeRequest (data, url, cb) {
+   const options = {
+     path: '/v0.1/pipeline_stats',
+     method: 'POST',
+     headers: {
+       'Datadog-Meta-Lang': 'javascript',
+       'Datadog-Meta-Tracer-Version': pkg.version,
+       'Content-Type': 'application/msgpack',
+       'Content-Encoding': 'gzip'
+     }
+   }
+
+   options.protocol = url.protocol
+   options.hostname = url.hostname
+   options.port = url.port
+
+   log.debug(() => `Request to the intake: ${JSON.stringify(options)}`)
+
+   request(data, options, (err, res) => {
+     cb(err, res)
+   })
+ }
+
+ class DataStreamsWriter {
+   constructor (config) {
+     const { hostname = '127.0.0.1', port = 8126, url } = config
+     this._url = url || new URL(format({
+       protocol: 'http:',
+       hostname: hostname || 'localhost',
+       port
+     }))
+   }
+
+   flush (payload) {
+     if (!request.writable) {
+       log.debug(() => `Maximum number of active requests reached. Payload discarded: ${JSON.stringify(payload)}`)
+       return
+     }
+     const encodedPayload = msgpack.encode(payload, { codec })
+
+     zlib.gzip(encodedPayload, { level: 1 }, (err, compressedData) => {
+       if (err) {
+         log.error(err)
+         return
+       }
+       makeRequest(compressedData, this._url, (err, res) => {
+         log.debug(`Response from the agent: ${res}`)
+         if (err) {
+           log.error(err)
+         }
+       })
+     })
+   }
+ }
+
+ module.exports = {
+   DataStreamsWriter
+ }
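
The flush path is msgpack-encode, then gzip at level 1 (favoring speed over ratio), then a POST to the agent's /v0.1/pipeline_stats endpoint. A usage sketch, assuming a local agent on the default port (the payload values are illustrative):

  const { DataStreamsWriter } = require('./datastreams/writer')
  const writer = new DataStreamsWriter({ hostname: '127.0.0.1', port: 8126 })
  // the payload shape mirrors what DataStreamsProcessor.onInterval() builds
  writer.flush({ Env: 'prod', Service: 'my-service', Stats: [], TracerVersion: '3.26.0', Lang: 'javascript' })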
package/packages/dd-trace/src/plugin_manager.js
@@ -133,7 +135,9 @@ module.exports = class PluginManager {
      serviceMapping,
      queryStringObfuscation,
      site,
-     url
+     url,
+     dbmPropagationMode,
+     dsmEnabled
    } = this._tracerConfig

    const sharedConfig = {}
@@ -146,6 +148,9 @@
      sharedConfig.queryStringObfuscation = queryStringObfuscation
    }

+   sharedConfig.dbmPropagationMode = dbmPropagationMode
+   sharedConfig.dsmEnabled = dsmEnabled
+
    if (serviceMapping && serviceMapping[name]) {
      sharedConfig.service = serviceMapping[name]
    }
package/packages/dd-trace/src/plugins/database.js
@@ -4,6 +4,7 @@ const StoragePlugin = require('./storage')

  class DatabasePlugin extends StoragePlugin {
    static get operation () { return 'query' }
+   static get peerServicePrecursors () { return ['db.name'] }

    constructor (...args) {
      super(...args)
@@ -38,7 +39,7 @@ class DatabasePlugin extends StoragePlugin {
    }

    injectDbmQuery (query, serviceName, isPreparedStatement = false) {
-     const mode = this.config.dbmPropagationMode || this._tracerConfig.dbmPropagationMode
+     const mode = this.config.dbmPropagationMode

      if (mode === 'disabled') {
        return query
package/packages/dd-trace/src/plugins/index.js
@@ -14,6 +14,7 @@ module.exports = {
    get '@node-redis/client' () { return require('../../../datadog-plugin-redis/src') },
    get '@opensearch-project/opensearch' () { return require('../../../datadog-plugin-opensearch/src') },
    get '@redis/client' () { return require('../../../datadog-plugin-redis/src') },
+   get '@smithy/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
    get 'amqp10' () { return require('../../../datadog-plugin-amqp10/src') },
    get 'amqplib' () { return require('../../../datadog-plugin-amqplib/src') },
    get 'aws-sdk' () { return require('../../../datadog-plugin-aws-sdk/src') },
package/packages/dd-trace/src/plugins/outbound.js
@@ -35,7 +35,7 @@ class OutboundPlugin extends TracingPlugin {
     * `_dd.peer.service.source`'s value is `peer.service`
     */

-   if (tags['peer.service']) {
+   if (tags['peer.service'] !== undefined) {
      return { [PEER_SERVICE_SOURCE_KEY]: 'peer.service' }
    }

@@ -43,6 +43,7 @@
      ...this.constructor.peerServicePrecursors,
      ...COMMON_PEER_SVC_SOURCE_TAGS
    ]
+
    for (const sourceTag of sourceTags) {
      if (tags[sourceTag]) {
        return {
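
The move to `!== undefined` only changes behavior for falsy tag values; a sketch with a hypothetical tags object:

  const tags = { 'peer.service': '' }
  console.log(Boolean(tags['peer.service']))        // false: the old truthiness check fell through to precursor tags
  console.log(tags['peer.service'] !== undefined)   // true:  any explicitly set peer.service, even '', now wins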
package/packages/dd-trace/src/plugins/tracing.js
@@ -33,6 +33,9 @@ class TracingPlugin extends Plugin {
    }

    serviceName (...serviceArgs) {
+     if (Nomenclature.shouldUseConsistentServiceNaming) {
+       return Nomenclature.shortCircuitServiceName(this.config, ...serviceArgs)
+     }
      const { type, id, kind } = this.constructor
      return Nomenclature.serviceName(type, kind, id, ...serviceArgs)
    }
package/packages/dd-trace/src/plugins/util/git.js
@@ -35,9 +35,41 @@ function isShallowRepository () {
    return sanitizedExec('git', ['rev-parse', '--is-shallow-repository']) === 'true'
  }

+ function getGitVersion () {
+   const gitVersionString = sanitizedExec('git', ['version'])
+   const gitVersionMatches = gitVersionString.match(/git version (\d+)\.(\d+)\.(\d+)/)
+   try {
+     return {
+       major: parseInt(gitVersionMatches[1]),
+       minor: parseInt(gitVersionMatches[2]),
+       patch: parseInt(gitVersionMatches[3])
+     }
+   } catch (e) {
+     return null
+   }
+ }
+
  function unshallowRepository () {
-   sanitizedExec('git', ['config', 'remote.origin.partialclonefilter', '"blob:none"'])
-   sanitizedExec('git', ['fetch', '--shallow-since="1 month ago"', '--update-shallow', '--refetch'])
+   const gitVersion = getGitVersion()
+   if (!gitVersion) {
+     log.warn('Git version could not be extracted, so git unshallow will not proceed')
+     return
+   }
+   if (gitVersion.major < 2 || (gitVersion.major === 2 && gitVersion.minor < 27)) {
+     log.warn('Git version is <2.27, so git unshallow will not proceed')
+     return
+   }
+   const defaultRemoteName = sanitizedExec('git', ['config', '--default', 'origin', '--get', 'clone.defaultRemoteName'])
+   const revParseHead = sanitizedExec('git', ['rev-parse', 'HEAD'])
+   sanitizedExec('git', [
+     'fetch',
+     '--shallow-since="1 month ago"',
+     '--update-shallow',
+     '--filter=blob:none',
+     '--recurse-submodules=no',
+     defaultRemoteName,
+     revParseHead
+   ])
  }

  function getRepositoryUrl () {
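
getGitVersion parses git's human-readable banner, and the new guard skips the unshallow step entirely on clients older than 2.27. A quick sketch of the parse (version strings are illustrative):

  const m = 'git version 2.40.0'.match(/git version (\d+)\.(\d+)\.(\d+)/)
  // m → ['git version 2.40.0', '2', '40', '0'], i.e. { major: 2, minor: 40, patch: 0 }: unshallow proceeds
  // 'git version 2.25.1' → minor 25 < 27: warn and skip; unparseable output → null: warn and skip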
@@ -56,7 +88,7 @@ function getLatestCommits () {
    }
  }

- function getCommitsToUpload (commitsToExclude) {
+ function getCommitsToUpload (commitsToExclude, commitsToInclude) {
    const commitsToExcludeString = commitsToExclude.map(commit => `^${commit}`)

    try {
@@ -68,8 +100,8 @@ function getCommitsToUpload (commitsToExclude) {
      '--no-object-names',
      '--filter=blob:none',
      '--since="1 month ago"',
-     'HEAD',
-     ...commitsToExcludeString
+     ...commitsToExcludeString,
+     ...commitsToInclude
    ],
    { stdio: 'pipe', maxBuffer: GIT_REV_LIST_MAX_BUFFER })
      .toString()
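
With the reordered arguments, HEAD is no longer implied; the caller passes the commits to include explicitly. A sketch of the resulting argument tail (the SHAs are illustrative and truncated):

  const commitsToExclude = ['1f2e3d4c']   // in practice, full-length SHAs
  const commitsToInclude = ['9a8b7c6d']
  const tail = [...commitsToExclude.map(commit => `^${commit}`), ...commitsToInclude]
  // → ['^1f2e3d4c', '9a8b7c6d'], appended after --since="1 month ago" instead of a hard-coded 'HEAD'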
package/packages/dd-trace/src/plugins/util/user-provided-git.js
@@ -13,6 +13,8 @@ const {
  } = require('./tags')

  const { normalizeRef } = require('./ci')
+ const log = require('../../log')
+ const { URL } = require('url')

  function removeEmptyValues (tags) {
    return Object.keys(tags).reduce((filteredTags, tag) => {
@@ -39,6 +41,37 @@ function filterSensitiveInfoFromRepository (repositoryUrl) {
    }
  }

+ // The regex is extracted from
+ // https://github.com/jonschlinkert/is-git-url/blob/396965ffabf2f46656c8af4c47bef1d69f09292e/index.js#L9C15-L9C87
+ function validateGitRepositoryUrl (repoUrl) {
+   return /(?:git|ssh|https?|git@[-\w.]+):(\/\/)?(.*?)(\.git)(\/?|#[-\d\w._]+?)$/.test(repoUrl)
+ }
+
+ function validateGitCommitSha (gitCommitSha) {
+   const isValidSha1 = /^[0-9a-f]{40}$/.test(gitCommitSha)
+   const isValidSha256 = /^[0-9a-f]{64}$/.test(gitCommitSha)
+   return isValidSha1 || isValidSha256
+ }
+
+ function removeInvalidGitMetadata (metadata) {
+   return Object.keys(metadata).reduce((filteredTags, tag) => {
+     if (tag === GIT_REPOSITORY_URL) {
+       if (!validateGitRepositoryUrl(metadata[GIT_REPOSITORY_URL])) {
+         log.error('DD_GIT_REPOSITORY_URL must be a valid URL')
+         return filteredTags
+       }
+     }
+     if (tag === GIT_COMMIT_SHA) {
+       if (!validateGitCommitSha(metadata[GIT_COMMIT_SHA])) {
+         log.error('DD_GIT_COMMIT_SHA must be a full-length git SHA')
+         return filteredTags
+       }
+     }
+     filteredTags[tag] = metadata[tag]
+     return filteredTags
+   }, {})
+ }
+
  function getUserProviderGitMetadata () {
    const {
      DD_GIT_COMMIT_SHA,
@@ -62,7 +95,7 @@ function getUserProviderGitMetadata () {
      tag = normalizeRef(DD_GIT_BRANCH)
    }

-   return removeEmptyValues({
+   const metadata = removeEmptyValues({
      [GIT_COMMIT_SHA]: DD_GIT_COMMIT_SHA,
      [GIT_BRANCH]: branch,
      [GIT_REPOSITORY_URL]: filterSensitiveInfoFromRepository(DD_GIT_REPOSITORY_URL),
@@ -75,6 +108,7 @@
      [GIT_COMMIT_AUTHOR_EMAIL]: DD_GIT_COMMIT_AUTHOR_EMAIL,
      [GIT_COMMIT_AUTHOR_DATE]: DD_GIT_COMMIT_AUTHOR_DATE
    })
+   return removeInvalidGitMetadata(metadata)
  }

- module.exports = { getUserProviderGitMetadata }
+ module.exports = { getUserProviderGitMetadata, validateGitRepositoryUrl, validateGitCommitSha }
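
A few hypothetical inputs against the new validators:

  const { validateGitRepositoryUrl, validateGitCommitSha } = require('./user-provided-git')
  validateGitCommitSha('a'.repeat(40))   // true: full-length SHA-1 (64 hex chars also passes, for SHA-256)
  validateGitCommitSha('abc123')         // false: short SHAs are rejected
  validateGitRepositoryUrl('git@github.com:DataDog/dd-trace-js.git')  // true
  validateGitRepositoryUrl('not a url')  // false: invalid values are dropped with a log.error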
package/packages/dd-trace/src/profiling/config.js
@@ -12,7 +12,7 @@ const WallProfiler = require('./profilers/wall')
  const SpaceProfiler = require('./profilers/space')
  const { oomExportStrategies, snapshotKinds } = require('./constants')
  const { tagger } = require('./tagger')
- const { isTrue } = require('../util')
+ const { isFalse, isTrue } = require('../util')

  class Config {
    constructor (options = {}) {
@@ -32,6 +32,8 @@ class Config {
      DD_PROFILING_SOURCE_MAP,
      DD_PROFILING_UPLOAD_PERIOD,
      DD_PROFILING_PPROF_PREFIX,
+     DD_PROFILING_HEAP_ENABLED,
+     DD_PROFILING_WALLTIME_ENABLED,
      DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED,
      DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE,
      DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT,
@@ -106,10 +108,9 @@ class Config {
      exportCommand
    }

-   const profilers = coalesce(options.profilers, DD_PROFILING_PROFILERS, [
-     new WallProfiler(this),
-     new SpaceProfiler(this)
-   ])
+   const profilers = options.profilers
+     ? options.profilers
+     : getProfilers({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS })

    this.profilers = ensureProfilers(profilers, this)
  }
@@ -117,6 +118,32 @@

  module.exports = { Config }

+ function getProfilers ({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS }) {
+   // First consider the "legacy" DD_PROFILING_PROFILERS env variable, defaulting to wall + space
+   // Use a Set to avoid duplicates
+   const profilers = new Set(coalesce(DD_PROFILING_PROFILERS, 'wall,space').split(','))
+
+   // Add/remove wall depending on the value of DD_PROFILING_WALLTIME_ENABLED
+   if (DD_PROFILING_WALLTIME_ENABLED != null) {
+     if (isTrue(DD_PROFILING_WALLTIME_ENABLED)) {
+       profilers.add('wall')
+     } else if (isFalse(DD_PROFILING_WALLTIME_ENABLED)) {
+       profilers.delete('wall')
+     }
+   }
+
+   // Add/remove space depending on the value of DD_PROFILING_HEAP_ENABLED
+   if (DD_PROFILING_HEAP_ENABLED != null) {
+     if (isTrue(DD_PROFILING_HEAP_ENABLED)) {
+       profilers.add('space')
+     } else if (isFalse(DD_PROFILING_HEAP_ENABLED)) {
+       profilers.delete('space')
+     }
+   }
+
+   return [...profilers]
+ }
+
  function getExportStrategy (name, options) {
    const strategy = Object.values(oomExportStrategies).find(value => value === name)
    if (strategy === undefined) {
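
The net effect of getProfilers, for a few hypothetical environment combinations:

  getProfilers({})                                           // → ['wall', 'space'] (default)
  getProfilers({ DD_PROFILING_PROFILERS: 'wall',
                 DD_PROFILING_HEAP_ENABLED: 'true' })        // → ['wall', 'space']
  getProfilers({ DD_PROFILING_WALLTIME_ENABLED: 'false' })   // → ['space']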
package/packages/dd-trace/src/service-naming/index.js
@@ -3,7 +3,7 @@ const { schemaDefinitions } = require('./schemas')

  class SchemaManager {
    constructor () {
      this.schemas = schemaDefinitions
-     this.config = { spanAttributeSchema: 'v0' }
+     this.config = { spanAttributeSchema: 'v0', traceRemoveIntegrationServiceNamesEnabled: false }
    }

    get schema () {
@@ -14,6 +14,10 @@ class SchemaManager {
      return this.config.spanAttributeSchema
    }

+   get shouldUseConsistentServiceNaming () {
+     return this.config.traceRemoveIntegrationServiceNamesEnabled && this.version === 'v0'
+   }
+
    opName (type, kind, plugin, ...opNameArgs) {
      return this.schema.getOpName(type, kind, plugin, ...opNameArgs)
    }
@@ -22,6 +26,14 @@
      return this.schema.getServiceName(type, kind, plugin, this.config.service, ...serviceNameArgs)
    }

+   shortCircuitServiceName (pluginConfig, ...args) {
+     // We're short-circuiting, so we do not obey custom service functions
+     if (typeof pluginConfig.service === 'function') {
+       return this.config.service
+     }
+     return pluginConfig.service || this.config.service
+   }
+
    configure (config = {}) {
      this.config = config
    }
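
With the short circuit active (v0 schema plus traceRemoveIntegrationServiceNamesEnabled), the result for a few hypothetical plugin configs, assuming the tracer-level service is 'my-app' and `manager` is the SchemaManager instance:

  manager.shortCircuitServiceName({ service: 'custom-db' })       // → 'custom-db' (string overrides still apply)
  manager.shortCircuitServiceName({})                             // → 'my-app'
  manager.shortCircuitServiceName({ service: () => 'computed' })  // → 'my-app' (service functions are ignored)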
package/packages/dd-trace/src/service-naming/schemas/v0/web.js
@@ -1,13 +1,22 @@
  const { identityService } = require('../util')
+ const { DD_MAJOR } = require('../../../../../../version')

  const web = {
    client: {
+     grpc: {
+       opName: () => DD_MAJOR <= 2 ? 'grpc.request' : 'grpc.client',
+       serviceName: identityService
+     },
      moleculer: {
        opName: () => 'moleculer.call',
        serviceName: identityService
      }
    },
    server: {
+     grpc: {
+       opName: () => DD_MAJOR <= 2 ? 'grpc.request' : 'grpc.server',
+       serviceName: identityService
+     },
      moleculer: {
        opName: () => 'moleculer.action',
        serviceName: identityService
package/packages/dd-trace/src/service-naming/schemas/v1/web.js
@@ -2,12 +2,20 @@ const { identityService } = require('../util')

  const web = {
    client: {
+     grpc: {
+       opName: () => 'grpc.client.request',
+       serviceName: identityService
+     },
      moleculer: {
        opName: () => 'moleculer.client.request',
        serviceName: identityService
      }
    },
    server: {
+     grpc: {
+       opName: () => 'grpc.server.request',
+       serviceName: identityService
+     },
      moleculer: {
        opName: () => 'moleculer.server.request',
        serviceName: identityService
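
Taken together, the new grpc entries resolve to these operation names (DD_MAJOR is 3 for this release; serviceName is identityService in every case, i.e. the configured service is passed through unchanged):

  const grpcOpNames = {
    v0: { client: 'grpc.client', server: 'grpc.server' },   // 'grpc.request' for both on dd-trace 2.x
    v1: { client: 'grpc.client.request', server: 'grpc.server.request' }
  }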