dd-trace 5.59.0 → 5.61.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/index.d.ts +6 -0
  2. package/package.json +7 -7
  3. package/packages/datadog-code-origin/index.js +3 -0
  4. package/packages/datadog-instrumentations/src/apollo-server.js +14 -3
  5. package/packages/datadog-instrumentations/src/azure-functions.js +5 -0
  6. package/packages/datadog-instrumentations/src/azure-service-bus.js +38 -0
  7. package/packages/datadog-instrumentations/src/fastify.js +17 -0
  8. package/packages/datadog-instrumentations/src/helpers/hooks.js +1 -0
  9. package/packages/datadog-instrumentations/src/next.js +17 -18
  10. package/packages/datadog-instrumentations/src/openai.js +13 -114
  11. package/packages/datadog-instrumentations/src/sequelize.js +4 -14
  12. package/packages/datadog-plugin-aws-sdk/src/services/bedrockruntime/tracing.js +6 -38
  13. package/packages/datadog-plugin-azure-functions/src/index.js +57 -28
  14. package/packages/datadog-plugin-azure-service-bus/src/index.js +15 -0
  15. package/packages/datadog-plugin-azure-service-bus/src/producer.js +36 -0
  16. package/packages/datadog-plugin-cypress/src/cypress-plugin.js +24 -23
  17. package/packages/datadog-plugin-google-cloud-vertexai/src/tracing.js +3 -155
  18. package/packages/datadog-plugin-langchain/src/handlers/default.js +0 -18
  19. package/packages/datadog-plugin-langchain/src/handlers/embedding.js +0 -48
  20. package/packages/datadog-plugin-langchain/src/handlers/language_models.js +18 -0
  21. package/packages/datadog-plugin-langchain/src/tracing.js +5 -17
  22. package/packages/datadog-plugin-openai/src/stream-helpers.js +114 -0
  23. package/packages/datadog-plugin-openai/src/tracing.js +38 -0
  24. package/packages/dd-trace/src/appsec/iast/analyzers/cookie-analyzer.js +8 -1
  25. package/packages/dd-trace/src/appsec/iast/analyzers/hsts-header-missing-analyzer.js +2 -2
  26. package/packages/dd-trace/src/appsec/iast/analyzers/missing-header-analyzer.js +11 -10
  27. package/packages/dd-trace/src/appsec/iast/analyzers/set-cookies-header-interceptor.js +25 -18
  28. package/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js +13 -5
  29. package/packages/dd-trace/src/appsec/iast/analyzers/unvalidated-redirect-analyzer.js +5 -1
  30. package/packages/dd-trace/src/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.js +2 -2
  31. package/packages/dd-trace/src/appsec/iast/iast-plugin.js +4 -0
  32. package/packages/dd-trace/src/appsec/iast/index.js +25 -7
  33. package/packages/dd-trace/src/appsec/iast/taint-tracking/plugin.js +79 -21
  34. package/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js +1 -3
  35. package/packages/dd-trace/src/appsec/rasp/fs-plugin.js +0 -4
  36. package/packages/dd-trace/src/appsec/reporter.js +3 -15
  37. package/packages/dd-trace/src/appsec/waf/index.js +20 -1
  38. package/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js +2 -1
  39. package/packages/dd-trace/src/config.js +0 -16
  40. package/packages/dd-trace/src/datastreams/schemas/schema_builder.js +4 -8
  41. package/packages/dd-trace/src/datastreams/schemas/schema_sampler.js +2 -4
  42. package/packages/dd-trace/src/debugger/config.js +16 -0
  43. package/packages/dd-trace/src/debugger/devtools_client/breakpoints.js +1 -1
  44. package/packages/dd-trace/src/debugger/devtools_client/config.js +2 -6
  45. package/packages/dd-trace/src/debugger/devtools_client/index.js +1 -1
  46. package/packages/dd-trace/src/debugger/devtools_client/log.js +19 -0
  47. package/packages/dd-trace/src/debugger/devtools_client/remote_config.js +1 -1
  48. package/packages/dd-trace/src/debugger/devtools_client/send.js +1 -1
  49. package/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js +1 -1
  50. package/packages/dd-trace/src/debugger/devtools_client/state.js +1 -1
  51. package/packages/dd-trace/src/debugger/devtools_client/status.js +1 -1
  52. package/packages/dd-trace/src/debugger/index.js +13 -3
  53. package/packages/dd-trace/src/plugins/index.js +1 -0
  54. package/packages/dd-trace/src/plugins/util/ci.js +23 -7
  55. package/packages/dd-trace/src/plugins/util/git.js +53 -18
  56. package/packages/dd-trace/src/plugins/util/tags.js +8 -6
  57. package/packages/dd-trace/src/profiling/profilers/events.js +3 -3
  58. package/packages/dd-trace/src/profiling/profilers/space.js +4 -3
  59. package/packages/dd-trace/src/profiling/profilers/wall.js +5 -4
  60. package/packages/dd-trace/src/remote_config/capabilities.js +2 -1
  61. package/packages/dd-trace/src/remote_config/index.js +2 -0
  62. package/packages/dd-trace/src/remote_config/scheduler.js +2 -1
  63. package/packages/dd-trace/src/service-naming/schemas/v0/messaging.js +4 -0
  64. package/packages/dd-trace/src/supported-configurations.json +1 -0
  65. package/packages/datadog-plugin-langchain/src/handlers/chain.js +0 -50
  66. package/packages/datadog-plugin-langchain/src/handlers/language_models/chat_model.js +0 -101
  67. package/packages/datadog-plugin-langchain/src/handlers/language_models/index.js +0 -48
  68. package/packages/datadog-plugin-langchain/src/handlers/language_models/llm.js +0 -58
@@ -3,7 +3,6 @@
3
3
  const dc = require('dc-polyfill')
4
4
  const zlib = require('zlib')
5
5
 
6
- const Limiter = require('../rate_limiter')
7
6
  const { storage } = require('../../../datadog-core')
8
7
  const web = require('../plugins/util/web')
9
8
  const { ipHeaderList } = require('../plugins/util/ip_extractor')
@@ -15,7 +14,6 @@ const {
15
14
  updateWafRequestsMetricTags,
16
15
  updateRaspRequestsMetricTags,
17
16
  updateRaspRuleSkippedMetricTags,
18
- updateRateLimitedMetric,
19
17
  getRequestMetrics
20
18
  } = require('./telemetry')
21
19
  const { keepTrace } = require('../priority_sampler')
@@ -31,9 +29,6 @@ const COLLECTED_REQUEST_BODY_MAX_ELEMENTS_PER_NODE = 256
31
29
 
32
30
  const telemetryLogCh = dc.channel('datadog:telemetry:log')
33
31
 
34
- // default limiter, configurable with setRateLimit()
35
- let limiter = new Limiter(100)
36
-
37
32
  const config = {
38
33
  headersExtendedCollectionEnabled: false,
39
34
  maxHeadersCollected: 0,
@@ -91,7 +86,6 @@ const NON_EXTENDED_REQUEST_HEADERS = new Set([...requestHeadersList, ...eventHea
91
86
  const NON_EXTENDED_RESPONSE_HEADERS = new Set(contentHeaderList)
92
87
 
93
88
  function init (_config) {
94
- limiter = new Limiter(_config.rateLimit)
95
89
  config.headersExtendedCollectionEnabled = _config.extendedHeadersCollection.enabled
96
90
  config.maxHeadersCollected = _config.extendedHeadersCollection.maxHeaders
97
91
  config.headersRedaction = _config.extendedHeadersCollection.redaction
@@ -325,12 +319,6 @@ function reportAttack (attackData) {
325
319
  'appsec.event': 'true'
326
320
  }
327
321
 
328
- if (limiter.isAllowed()) {
329
- keepTrace(rootSpan, ASM)
330
- } else {
331
- updateRateLimitedMetric(req)
332
- }
333
-
334
322
  // TODO: maybe add this to format.js later (to take decision as late as possible)
335
323
  if (!currentTags['_dd.origin']) {
336
324
  newTags['_dd.origin'] = 'appsec'
@@ -430,8 +418,8 @@ function isRaspAttack (events) {
430
418
  return events.some(e => e.rule?.tags?.module === 'rasp')
431
419
  }
432
420
 
433
- function isFingerprintAttribute (attribute) {
434
- return attribute.startsWith('_dd.appsec.fp')
421
+ function isSchemaAttribute (attribute) {
422
+ return attribute.startsWith('_dd.appsec.s.')
435
423
  }
436
424
 
437
425
  function reportAttributes (attributes) {
@@ -444,7 +432,7 @@ function reportAttributes (attributes) {
444
432
 
445
433
  const tags = {}
446
434
  for (let [tag, value] of Object.entries(attributes)) {
447
- if (!isFingerprintAttribute(tag)) {
435
+ if (isSchemaAttribute(tag)) {
448
436
  const gzippedValue = zlib.gzipSync(JSON.stringify(value))
449
437
  value = gzippedValue.toString('base64')
450
438
  }
@@ -3,6 +3,11 @@
3
3
  const { storage } = require('../../../../datadog-core')
4
4
  const log = require('../../log')
5
5
  const Reporter = require('../reporter')
6
+ const Limiter = require('../../rate_limiter')
7
+ const { keepTrace } = require('../../priority_sampler')
8
+ const { ASM } = require('../../standalone/product')
9
+ const web = require('../../plugins/util/web')
10
+ const { updateRateLimitedMetric } = require('../telemetry')
6
11
 
7
12
  class WafUpdateError extends Error {
8
13
  constructor (diagnosticErrors) {
@@ -12,6 +17,8 @@ class WafUpdateError extends Error {
12
17
  }
13
18
  }
14
19
 
20
+ let limiter = new Limiter(100)
21
+
15
22
  const waf = {
16
23
  wafManager: null,
17
24
  init,
@@ -27,6 +34,8 @@ const waf = {
27
34
  function init (rules, config) {
28
35
  destroy()
29
36
 
37
+ limiter = new Limiter(config.rateLimit)
38
+
30
39
  // dirty require to make startup faster for serverless
31
40
  const WAFManager = require('./waf_manager')
32
41
 
@@ -99,8 +108,18 @@ function run (data, req, raspRule) {
99
108
  }
100
109
 
101
110
  const wafContext = waf.wafManager.getWAFContext(req)
111
+ const result = wafContext.run(data, raspRule)
112
+
113
+ if (result?.keep) {
114
+ if (limiter.isAllowed()) {
115
+ const rootSpan = web.root(req)
116
+ keepTrace(rootSpan, ASM)
117
+ } else {
118
+ updateRateLimitedMetric(req)
119
+ }
120
+ }
102
121
 
103
- return wafContext.run(data, raspRule)
122
+ return result
104
123
  }
105
124
 
106
125
  function disposeContext (req) {
@@ -5,6 +5,7 @@ const { Worker, threadId: parentThreadId } = require('worker_threads')
5
5
  const { randomUUID } = require('crypto')
6
6
  const log = require('../../log')
7
7
  const { getEnvironmentVariables } = require('../../config-helper')
8
+ const getDebuggerConfig = require('../../debugger/config')
8
9
 
9
10
  const probeIdToResolveBreakpointSet = new Map()
10
11
  const probeIdToResolveBreakpointRemove = new Map()
@@ -82,7 +83,7 @@ class TestVisDynamicInstrumentation {
82
83
  DD_INSTRUMENTATION_TELEMETRY_ENABLED: 'false'
83
84
  },
84
85
  workerData: {
85
- config: this._config.serialize(),
86
+ config: getDebuggerConfig(this._config),
86
87
  parentThreadId,
87
88
  probePort: probeChannel.port1,
88
89
  configPort: configChannel.port1,
@@ -1546,22 +1546,6 @@ class Config {
1546
1546
  }
1547
1547
  }
1548
1548
  }
1549
-
1550
- // TODO: Refactor the Config class so it never produces any config objects that are incompatible with MessageChannel
1551
- /**
1552
- * Serializes the config object so it can be passed over a Worker Thread MessageChannel.
1553
- * @returns {Object} The serialized config object.
1554
- */
1555
- serialize () {
1556
- // URL objects cannot be serialized over the MessageChannel, so we need to convert them to strings first
1557
- if (this.url instanceof URL) {
1558
- const config = { ...this }
1559
- config.url = this.url.toString()
1560
- return config
1561
- }
1562
-
1563
- return this
1564
- }
1565
1549
  }
1566
1550
 
1567
1551
  function handleOtel (tagString) {
@@ -66,17 +66,13 @@ class SchemaBuilder {
66
66
  }
67
67
 
68
68
  class OpenApiSchema {
69
- constructor () {
70
- this.openapi = '3.0.0'
71
- this.components = new OpenApiComponents()
72
- }
69
+ openapi = '3.0.0'
70
+ components = new OpenApiComponents()
73
71
  }
74
72
 
75
73
  OpenApiSchema.SCHEMA = class {
76
- constructor () {
77
- this.type = 'object'
78
- this.properties = {}
79
- }
74
+ type = 'object'
75
+ properties = {}
80
76
  }
81
77
 
82
78
  OpenApiSchema.PROPERTY = class {
@@ -3,10 +3,8 @@
3
3
  const SAMPLE_INTERVAL_MILLIS = 30 * 1000
4
4
 
5
5
  class SchemaSampler {
6
- constructor () {
7
- this.weight = 0
8
- this.lastSampleMs = 0
9
- }
6
+ weight = 0
7
+ lastSampleMs = 0
10
8
 
11
9
  trySample (currentTimeMs) {
12
10
  if (currentTimeMs >= this.lastSampleMs + SAMPLE_INTERVAL_MILLIS) {
@@ -0,0 +1,16 @@
1
+ 'use strict'
2
+
3
+ module.exports = function getDebuggerConfig (config) {
4
+ return {
5
+ commitSHA: config.commitSHA,
6
+ debug: config.debug,
7
+ dynamicInstrumentation: config.dynamicInstrumentation,
8
+ hostname: config.hostname,
9
+ logLevel: config.logLevel,
10
+ port: config.port,
11
+ repositoryUrl: config.repositoryUrl,
12
+ runtimeId: config.tags['runtime-id'],
13
+ service: config.service,
14
+ url: config.url?.toString(),
15
+ }
16
+ }
@@ -12,7 +12,7 @@ const {
12
12
  breakpointToProbes,
13
13
  probeToLocation
14
14
  } = require('./state')
15
- const log = require('../../log')
15
+ const log = require('./log')
16
16
 
17
17
  let sessionStarted = false
18
18
  const probes = new Map()
@@ -2,14 +2,10 @@
2
2
 
3
3
  const { workerData: { config: parentConfig, parentThreadId, configPort } } = require('node:worker_threads')
4
4
  const { format } = require('node:url')
5
- const log = require('../../log')
5
+ const log = require('./log')
6
6
 
7
7
  const config = module.exports = {
8
- dynamicInstrumentation: parentConfig.dynamicInstrumentation,
9
- runtimeId: parentConfig.tags['runtime-id'],
10
- service: parentConfig.service,
11
- commitSHA: parentConfig.commitSHA,
12
- repositoryUrl: parentConfig.repositoryUrl,
8
+ ...parentConfig,
13
9
  parentThreadId,
14
10
  maxTotalPayloadSize: 5 * 1024 * 1024 // 5MB
15
11
  }
@@ -9,7 +9,7 @@ const { getStackFromCallFrames } = require('./state')
9
9
  const { ackEmitting } = require('./status')
10
10
  const { parentThreadId } = require('./config')
11
11
  const { MAX_SNAPSHOTS_PER_SECOND_GLOBALLY } = require('./defaults')
12
- const log = require('../../log')
12
+ const log = require('./log')
13
13
  const { version } = require('../../../../../package.json')
14
14
  const { NODE_MAJOR } = require('../../../../../version')
15
15
 
@@ -0,0 +1,19 @@
1
+ 'use strict'
2
+
3
+ const { workerData } = require('node:worker_threads')
4
+
5
+ // For testing purposes, we allow `workerData` to be undefined and fallback to a default config
6
+ const { config: { debug, logLevel }, logPort } = workerData ?? { config: { debug: false } }
7
+
8
+ const LEVELS = ['error', 'warn', 'info', 'debug']
9
+ const on = (level, ...args) => {
10
+ if (typeof args[0] === 'function') {
11
+ args = [args[0]()]
12
+ }
13
+ logPort.postMessage({ level, args })
14
+ }
15
+ const off = () => {}
16
+
17
+ for (const level of LEVELS) {
18
+ module.exports[level] = debug && LEVELS.indexOf(logLevel) >= LEVELS.indexOf(level) ? on.bind(null, level) : off
19
+ }
@@ -3,7 +3,7 @@
3
3
  const { workerData: { probePort } } = require('node:worker_threads')
4
4
  const { addBreakpoint, removeBreakpoint, modifyBreakpoint } = require('./breakpoints')
5
5
  const { ackReceived, ackInstalled, ackError } = require('./status')
6
- const log = require('../../log')
6
+ const log = require('./log')
7
7
 
8
8
  // Example log line probe (simplified):
9
9
  // {
@@ -7,7 +7,7 @@ const config = require('./config')
7
7
  const JSONBuffer = require('./json-buffer')
8
8
  const request = require('../../exporters/common/request')
9
9
  const { GIT_COMMIT_SHA, GIT_REPOSITORY_URL } = require('../../plugins/util/tags')
10
- const log = require('../../log')
10
+ const log = require('./log')
11
11
  const { version } = require('../../../../../package.json')
12
12
  const { getEnvironmentVariable } = require('../../config-helper')
13
13
 
@@ -2,7 +2,7 @@
2
2
 
3
3
  const { getRuntimeObject } = require('./collector')
4
4
  const { processRawState } = require('./processor')
5
- const log = require('../../../log')
5
+ const log = require('../log')
6
6
 
7
7
  const DEFAULT_MAX_REFERENCE_DEPTH = 3
8
8
  const DEFAULT_MAX_COLLECTION_SIZE = 100
@@ -4,7 +4,7 @@ const { join, dirname } = require('path')
4
4
  const { normalize } = require('source-map/lib/util')
5
5
  const { loadSourceMapSync } = require('./source-maps')
6
6
  const session = require('./session')
7
- const log = require('../../log')
7
+ const log = require('./log')
8
8
 
9
9
  const WINDOWS_DRIVE_LETTER_REGEX = /[a-zA-Z]/
10
10
 
@@ -5,7 +5,7 @@ const config = require('./config')
5
5
  const JSONBuffer = require('./json-buffer')
6
6
  const request = require('../../exporters/common/request')
7
7
  const FormData = require('../../exporters/common/form-data')
8
- const log = require('../../log')
8
+ const log = require('./log')
9
9
 
10
10
  module.exports = {
11
11
  ackReceived,
@@ -4,6 +4,7 @@ const { readFile } = require('fs')
4
4
  const { types } = require('util')
5
5
  const { join } = require('path')
6
6
  const { Worker, MessageChannel, threadId: parentThreadId } = require('worker_threads')
7
+ const getDebuggerConfig = require('./config')
7
8
  const log = require('../log')
8
9
 
9
10
  let worker = null
@@ -25,6 +26,7 @@ function start (config, rc) {
25
26
 
26
27
  const rcAckCallbacks = new Map()
27
28
  const probeChannel = new MessageChannel()
29
+ const logChannel = new MessageChannel()
28
30
  configChannel = new MessageChannel()
29
31
 
30
32
  process[Symbol.for('datadog:node:util:types')] = types
@@ -54,18 +56,24 @@ function start (config, rc) {
54
56
  })
55
57
  probeChannel.port2.on('messageerror', (err) => log.error('[debugger] received "messageerror" on probe port', err))
56
58
 
59
+ logChannel.port2.on('message', ({ level, args }) => {
60
+ log[level](...args)
61
+ })
62
+ logChannel.port2.on('messageerror', (err) => log.error('[debugger] received "messageerror" on log port', err))
63
+
57
64
  worker = new Worker(
58
65
  join(__dirname, 'devtools_client', 'index.js'),
59
66
  {
60
67
  execArgv: [], // Avoid worker thread inheriting the `-r` command line argument
61
68
  env, // Avoid worker thread inheriting the `NODE_OPTIONS` environment variable (in case it contains `-r`)
62
69
  workerData: {
63
- config: config.serialize(),
70
+ config: getDebuggerConfig(config),
64
71
  parentThreadId,
65
72
  probePort: probeChannel.port1,
73
+ logPort: logChannel.port1,
66
74
  configPort: configChannel.port1
67
75
  },
68
- transferList: [probeChannel.port1, configChannel.port1]
76
+ transferList: [probeChannel.port1, logChannel.port1, configChannel.port1]
69
77
  }
70
78
  )
71
79
 
@@ -94,13 +102,15 @@ function start (config, rc) {
94
102
  worker.unref()
95
103
  probeChannel.port1.unref()
96
104
  probeChannel.port2.unref()
105
+ logChannel.port1.unref()
106
+ logChannel.port2.unref()
97
107
  configChannel.port1.unref()
98
108
  configChannel.port2.unref()
99
109
  }
100
110
 
101
111
  function configure (config) {
102
112
  if (configChannel === null) return
103
- configChannel.port2.postMessage(config.serialize())
113
+ configChannel.port2.postMessage(getDebuggerConfig(config))
104
114
  }
105
115
 
106
116
  function readProbeFile (path, cb) {
@@ -4,6 +4,7 @@ module.exports = {
4
4
  get '@apollo/gateway' () { return require('../../../datadog-plugin-apollo/src') },
5
5
  get '@aws-sdk/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
6
6
  get '@azure/functions' () { return require('../../../datadog-plugin-azure-functions/src') },
7
+ get '@azure/service-bus' () { return require('../../../datadog-plugin-azure-service-bus/src') },
7
8
  get '@cucumber/cucumber' () { return require('../../../datadog-plugin-cucumber/src') },
8
9
  get '@playwright/test' () { return require('../../../datadog-plugin-playwright/src') },
9
10
  get '@elastic/elasticsearch' () { return require('../../../datadog-plugin-elasticsearch/src') },
@@ -27,7 +27,8 @@ const {
27
27
  GIT_COMMIT_COMMITTER_EMAIL,
28
28
  CI_NODE_LABELS,
29
29
  CI_NODE_NAME,
30
- PR_NUMBER
30
+ PR_NUMBER,
31
+ CI_JOB_ID
31
32
  } = require('./tags')
32
33
  const { filterSensitiveInfoFromRepository } = require('./url')
33
34
  const { getEnvironmentVariable, getEnvironmentVariables } = require('../../config-helper')
@@ -85,6 +86,13 @@ function resolveTilde (filePath) {
85
86
  return filePath
86
87
  }
87
88
 
89
+ function normalizeNumber (number) {
90
+ if (typeof number !== 'number') {
91
+ return number
92
+ }
93
+ return number.toString()
94
+ }
95
+
88
96
  function getGitHubEventPayload () {
89
97
  if (!getEnvironmentVariable('GITHUB_EVENT_PATH')) {
90
98
  return
@@ -210,7 +218,8 @@ module.exports = {
210
218
  [CI_NODE_LABELS]: CI_RUNNER_TAGS,
211
219
  [CI_NODE_NAME]: CI_RUNNER_ID,
212
220
  [GIT_PULL_REQUEST_BASE_BRANCH]: CI_MERGE_REQUEST_TARGET_BRANCH_NAME,
213
- [PR_NUMBER]: CI_MERGE_REQUEST_IID
221
+ [PR_NUMBER]: CI_MERGE_REQUEST_IID,
222
+ [CI_JOB_ID]: GITLAB_CI_JOB_ID
214
223
  }
215
224
  }
216
225
 
@@ -247,7 +256,8 @@ module.exports = {
247
256
  CIRCLE_WORKFLOW_ID,
248
257
  CIRCLE_BUILD_NUM,
249
258
  }),
250
- [PR_NUMBER]: CIRCLE_PR_NUMBER
259
+ [PR_NUMBER]: CIRCLE_PR_NUMBER,
260
+ [CI_JOB_ID]: CIRCLE_BUILD_NUM
251
261
  }
252
262
  }
253
263
 
@@ -298,7 +308,8 @@ module.exports = {
298
308
  GITHUB_REPOSITORY,
299
309
  GITHUB_RUN_ID,
300
310
  GITHUB_RUN_ATTEMPT
301
- })
311
+ }),
312
+ [CI_JOB_ID]: GITHUB_JOB
302
313
  }
303
314
  if (GITHUB_BASE_REF) { // `pull_request` or `pull_request_target` event
304
315
  tags[GIT_PULL_REQUEST_BASE_BRANCH] = GITHUB_BASE_REF
@@ -407,7 +418,8 @@ module.exports = {
407
418
  [CI_JOB_NAME]: SYSTEM_JOBDISPLAYNAME,
408
419
  [CI_ENV_VARS]: JSON.stringify({ SYSTEM_TEAMPROJECTID, BUILD_BUILDID, SYSTEM_JOBID }),
409
420
  [PR_NUMBER]: SYSTEM_PULLREQUEST_PULLREQUESTNUMBER,
410
- [GIT_PULL_REQUEST_BASE_BRANCH]: SYSTEM_PULLREQUEST_TARGETBRANCH
421
+ [GIT_PULL_REQUEST_BASE_BRANCH]: SYSTEM_PULLREQUEST_TARGETBRANCH,
422
+ [CI_JOB_ID]: SYSTEM_JOBID
411
423
  }
412
424
 
413
425
  if (SYSTEM_TEAMFOUNDATIONSERVERURI && SYSTEM_TEAMPROJECTID && BUILD_BUILDID) {
@@ -510,7 +522,8 @@ module.exports = {
510
522
  BUILDKITE_MESSAGE,
511
523
  BUILDKITE_AGENT_ID,
512
524
  BUILDKITE_PULL_REQUEST,
513
- BUILDKITE_PULL_REQUEST_BASE_BRANCH
525
+ BUILDKITE_PULL_REQUEST_BASE_BRANCH,
526
+ BUILDKITE_CI_JOB_ID
514
527
  } = env
515
528
 
516
529
  const extraTags = Object.keys(env).filter(envVar =>
@@ -542,6 +555,7 @@ module.exports = {
542
555
  [CI_NODE_NAME]: BUILDKITE_AGENT_ID,
543
556
  [CI_NODE_LABELS]: JSON.stringify(extraTags),
544
557
  [PR_NUMBER]: BUILDKITE_PULL_REQUEST,
558
+ [CI_JOB_ID]: BUILDKITE_CI_JOB_ID
545
559
  }
546
560
 
547
561
  if (BUILDKITE_PULL_REQUEST) {
@@ -682,7 +696,8 @@ module.exports = {
682
696
  CODEBUILD_BUILD_ARN,
683
697
  DD_PIPELINE_EXECUTION_ID,
684
698
  DD_ACTION_EXECUTION_ID
685
- })
699
+ }),
700
+ [CI_JOB_ID]: DD_ACTION_EXECUTION_ID
686
701
  }
687
702
  }
688
703
 
@@ -727,6 +742,7 @@ module.exports = {
727
742
  normalizeTag(tags, GIT_BRANCH, normalizeRef)
728
743
  normalizeTag(tags, GIT_TAG, normalizeRef)
729
744
  normalizeTag(tags, GIT_PULL_REQUEST_BASE_BRANCH, normalizeRef)
745
+ normalizeTag(tags, PR_NUMBER, normalizeNumber)
730
746
 
731
747
  return removeEmptyValues(tags)
732
748
  }
@@ -22,9 +22,9 @@ const {
22
22
  GIT_COMMIT_HEAD_AUTHOR_DATE,
23
23
  GIT_COMMIT_HEAD_AUTHOR_EMAIL,
24
24
  GIT_COMMIT_HEAD_AUTHOR_NAME,
25
- GIT_COMMIT_HEAD_COMMITER_DATE,
26
- GIT_COMMIT_HEAD_COMMITER_EMAIL,
27
- GIT_COMMIT_HEAD_COMMITER_NAME,
25
+ GIT_COMMIT_HEAD_COMMITTER_DATE,
26
+ GIT_COMMIT_HEAD_COMMITTER_EMAIL,
27
+ GIT_COMMIT_HEAD_COMMITTER_NAME,
28
28
  GIT_COMMIT_HEAD_MESSAGE
29
29
  } = require('./tags')
30
30
  const {
@@ -485,27 +485,41 @@ function getGitMetadata (ciMetadata) {
485
485
 
486
486
  if (headCommitSha) {
487
487
  if (isShallowRepository()) {
488
- unshallowRepository(true)
488
+ fetchHeadCommitSha(headCommitSha)
489
489
  }
490
490
 
491
- tags[GIT_COMMIT_HEAD_MESSAGE] =
492
- sanitizedExec('git', ['show', '-s', '--format=%B', headCommitSha], null, null, null, false)
493
-
494
491
  const [
492
+ gitHeadCommitSha,
493
+ headAuthorDate,
495
494
  headAuthorName,
496
495
  headAuthorEmail,
497
- headAuthorDate,
496
+ headCommitterDate,
498
497
  headCommitterName,
499
498
  headCommitterEmail,
500
- headCommitterDate
501
- ] = sanitizedExec('git', ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI', headCommitSha]).split(',')
502
-
503
- tags[GIT_COMMIT_HEAD_AUTHOR_DATE] = headAuthorDate
504
- tags[GIT_COMMIT_HEAD_AUTHOR_EMAIL] = headAuthorEmail
505
- tags[GIT_COMMIT_HEAD_AUTHOR_NAME] = headAuthorName
506
- tags[GIT_COMMIT_HEAD_COMMITER_DATE] = headCommitterDate
507
- tags[GIT_COMMIT_HEAD_COMMITER_EMAIL] = headCommitterEmail
508
- tags[GIT_COMMIT_HEAD_COMMITER_NAME] = headCommitterName
499
+ headCommitMessage
500
+ ] = sanitizedExec(
501
+ 'git',
502
+ [
503
+ 'show',
504
+ '-s',
505
+ '--format=\'%H","%aI","%an","%ae","%cI","%cn","%ce","%B\'',
506
+ headCommitSha
507
+ ],
508
+ null,
509
+ null,
510
+ null,
511
+ false
512
+ ).split('","')
513
+
514
+ if (gitHeadCommitSha) {
515
+ tags[GIT_COMMIT_HEAD_AUTHOR_DATE] = headAuthorDate
516
+ tags[GIT_COMMIT_HEAD_AUTHOR_EMAIL] = headAuthorEmail
517
+ tags[GIT_COMMIT_HEAD_AUTHOR_NAME] = headAuthorName
518
+ tags[GIT_COMMIT_HEAD_COMMITTER_DATE] = headCommitterDate
519
+ tags[GIT_COMMIT_HEAD_COMMITTER_EMAIL] = headCommitterEmail
520
+ tags[GIT_COMMIT_HEAD_COMMITTER_NAME] = headCommitterName
521
+ tags[GIT_COMMIT_HEAD_MESSAGE] = headCommitMessage
522
+ }
509
523
  }
510
524
 
511
525
  const entries = [
@@ -544,6 +558,26 @@ function getGitInformationDiscrepancy () {
544
558
  return { gitRepositoryUrl, gitCommitSHA }
545
559
  }
546
560
 
561
+ function fetchHeadCommitSha (headSha) {
562
+ const remoteName = getGitRemoteName()
563
+
564
+ sanitizedExec(
565
+ 'git',
566
+ [
567
+ 'fetch',
568
+ '--update-shallow',
569
+ '--filter=blob:none',
570
+ '--recurse-submodules=no',
571
+ '--no-write-fetch-head',
572
+ remoteName,
573
+ headSha
574
+ ],
575
+ { name: TELEMETRY_GIT_COMMAND, tags: { command: 'fetch_head_commit_sha' } },
576
+ { name: TELEMETRY_GIT_COMMAND_MS, tags: { command: 'fetch_head_commit_sha' } },
577
+ { name: TELEMETRY_GIT_COMMAND_ERRORS, tags: { command: 'fetch_head_commit_sha' } }
578
+ )
579
+ }
580
+
547
581
  module.exports = {
548
582
  getGitMetadata,
549
583
  getLatestCommits,
@@ -561,5 +595,6 @@ module.exports = {
561
595
  checkAndFetchBranch,
562
596
  getLocalBranches,
563
597
  getMergeBase,
564
- getCounts
598
+ getCounts,
599
+ fetchHeadCommitSha
565
600
  }
@@ -16,9 +16,9 @@ const GIT_COMMIT_HEAD_MESSAGE = 'git.commit.head.message'
16
16
  const GIT_COMMIT_HEAD_AUTHOR_DATE = 'git.commit.head.author.date'
17
17
  const GIT_COMMIT_HEAD_AUTHOR_EMAIL = 'git.commit.head.author.email'
18
18
  const GIT_COMMIT_HEAD_AUTHOR_NAME = 'git.commit.head.author.name'
19
- const GIT_COMMIT_HEAD_COMMITER_DATE = 'git.commit.head.commiter.date'
20
- const GIT_COMMIT_HEAD_COMMITER_EMAIL = 'git.commit.head.commiter.email'
21
- const GIT_COMMIT_HEAD_COMMITER_NAME = 'git.commit.head.commiter.name'
19
+ const GIT_COMMIT_HEAD_COMMITTER_DATE = 'git.commit.head.committer.date'
20
+ const GIT_COMMIT_HEAD_COMMITTER_EMAIL = 'git.commit.head.committer.email'
21
+ const GIT_COMMIT_HEAD_COMMITTER_NAME = 'git.commit.head.committer.name'
22
22
 
23
23
  const GIT_PULL_REQUEST_BASE_BRANCH_SHA = 'git.pull_request.base_branch_sha'
24
24
  const GIT_PULL_REQUEST_BASE_BRANCH = 'git.pull_request.base_branch'
@@ -31,6 +31,7 @@ const CI_PROVIDER_NAME = 'ci.provider.name'
31
31
  const CI_WORKSPACE_PATH = 'ci.workspace_path'
32
32
  const CI_JOB_URL = 'ci.job.url'
33
33
  const CI_JOB_NAME = 'ci.job.name'
34
+ const CI_JOB_ID = 'ci.job.id'
34
35
  const CI_STAGE_NAME = 'ci.stage.name'
35
36
  const CI_NODE_NAME = 'ci.node.name'
36
37
  const CI_NODE_LABELS = 'ci.node.labels'
@@ -56,9 +57,9 @@ module.exports = {
56
57
  GIT_COMMIT_HEAD_AUTHOR_DATE,
57
58
  GIT_COMMIT_HEAD_AUTHOR_EMAIL,
58
59
  GIT_COMMIT_HEAD_AUTHOR_NAME,
59
- GIT_COMMIT_HEAD_COMMITER_DATE,
60
- GIT_COMMIT_HEAD_COMMITER_EMAIL,
61
- GIT_COMMIT_HEAD_COMMITER_NAME,
60
+ GIT_COMMIT_HEAD_COMMITTER_DATE,
61
+ GIT_COMMIT_HEAD_COMMITTER_EMAIL,
62
+ GIT_COMMIT_HEAD_COMMITTER_NAME,
62
63
  GIT_PULL_REQUEST_BASE_BRANCH_SHA,
63
64
  GIT_PULL_REQUEST_BASE_BRANCH,
64
65
  CI_PIPELINE_ID,
@@ -69,6 +70,7 @@ module.exports = {
69
70
  CI_WORKSPACE_PATH,
70
71
  CI_JOB_URL,
71
72
  CI_JOB_NAME,
73
+ CI_JOB_ID,
72
74
  CI_STAGE_NAME,
73
75
  CI_ENV_VARS,
74
76
  CI_NODE_NAME,
@@ -375,10 +375,10 @@ function createPossionProcessSamplingFilter (samplingIntervalMillis) {
375
375
  * source with a sampling event filter and an event serializer.
376
376
  */
377
377
  class EventsProfiler {
378
- constructor (options = {}) {
379
- this.type = 'events'
380
- this.eventSerializer = new EventSerializer()
378
+ type = 'events'
379
+ eventSerializer = new EventSerializer()
381
380
 
381
+ constructor (options = {}) {
382
382
  const eventHandler = event => this.eventSerializer.addEvent(event)
383
383
  const eventFilter = options.timelineSamplingEnabled
384
384
  // options.samplingInterval comes in microseconds, we need millis
@@ -8,13 +8,14 @@ function strategiesToCallbackMode (strategies, callbackMode) {
8
8
  }
9
9
 
10
10
  class NativeSpaceProfiler {
11
+ type = 'space'
12
+ _pprof
13
+ _started = false
14
+
11
15
  constructor (options = {}) {
12
- this.type = 'space'
13
16
  this._samplingInterval = options.heapSamplingInterval || 512 * 1024
14
17
  this._stackDepth = options.stackDepth || 64
15
- this._pprof = undefined
16
18
  this._oomMonitoring = options.oomMonitoring || {}
17
- this._started = false
18
19
  }
19
20
 
20
21
  start ({ mapper, nearOOMCallback } = {}) {