dd-trace 5.59.0 → 5.60.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. package/package.json +6 -3
  2. package/packages/datadog-instrumentations/src/apollo-server.js +14 -3
  3. package/packages/datadog-instrumentations/src/openai.js +13 -114
  4. package/packages/datadog-plugin-google-cloud-vertexai/src/tracing.js +3 -155
  5. package/packages/datadog-plugin-openai/src/stream-helpers.js +114 -0
  6. package/packages/datadog-plugin-openai/src/tracing.js +38 -0
  7. package/packages/dd-trace/src/appsec/reporter.js +3 -15
  8. package/packages/dd-trace/src/appsec/waf/index.js +20 -1
  9. package/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js +2 -1
  10. package/packages/dd-trace/src/config.js +0 -16
  11. package/packages/dd-trace/src/debugger/config.js +16 -0
  12. package/packages/dd-trace/src/debugger/devtools_client/breakpoints.js +1 -1
  13. package/packages/dd-trace/src/debugger/devtools_client/config.js +2 -6
  14. package/packages/dd-trace/src/debugger/devtools_client/index.js +1 -1
  15. package/packages/dd-trace/src/debugger/devtools_client/log.js +19 -0
  16. package/packages/dd-trace/src/debugger/devtools_client/remote_config.js +1 -1
  17. package/packages/dd-trace/src/debugger/devtools_client/send.js +1 -1
  18. package/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js +1 -1
  19. package/packages/dd-trace/src/debugger/devtools_client/state.js +1 -1
  20. package/packages/dd-trace/src/debugger/devtools_client/status.js +1 -1
  21. package/packages/dd-trace/src/debugger/index.js +13 -3
  22. package/packages/dd-trace/src/plugins/util/ci.js +15 -7
  23. package/packages/dd-trace/src/plugins/util/git.js +6 -6
  24. package/packages/dd-trace/src/plugins/util/tags.js +8 -6
  25. package/packages/dd-trace/src/remote_config/capabilities.js +2 -1
  26. package/packages/dd-trace/src/remote_config/index.js +2 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "dd-trace",
-  "version": "5.59.0",
+  "version": "5.60.0",
   "description": "Datadog APM tracing client for JavaScript",
   "main": "index.js",
   "typings": "index.d.ts",
@@ -114,7 +114,7 @@
   ],
   "dependencies": {
     "@datadog/libdatadog": "0.7.0",
-    "@datadog/native-appsec": "10.0.0",
+    "@datadog/native-appsec": "10.0.1",
     "@datadog/native-iast-taint-tracking": "4.0.0",
     "@datadog/native-metrics": "3.1.1",
     "@datadog/pprof": "5.9.0",
@@ -173,7 +173,7 @@
     "graphql": "*",
     "jszip": "^3.10.1",
     "mocha": "^11.6.0",
-    "multer": "^2.0.1",
+    "multer": "^2.0.2",
     "nock": "^11.9.1",
     "nyc": "^15.1.0",
     "octokit": "^5.0.3",
@@ -187,5 +187,8 @@
     "workerpool": "^9.2.0",
     "yaml": "^2.8.0",
     "yarn-deduplicate": "^6.0.2"
+  },
+  "resolutions": {
+    "eslint-plugin-unicorn/@eslint/plugin-kit": "0.3.3"
   }
 }
package/packages/datadog-instrumentations/src/apollo-server.js CHANGED
@@ -77,6 +77,17 @@ function apolloServerHook (apolloServer) {
   return apolloServer
 }
 
-addHook({ name: '@apollo/server', file: 'dist/cjs/ApolloServer.js', versions: ['>=4.0.0'] }, apolloServerHook)
-addHook({ name: '@apollo/server', file: 'dist/cjs/express4/index.js', versions: ['>=4.0.0'] }, apolloExpress4Hook)
-addHook({ name: '@apollo/server', file: 'dist/cjs/utils/HeaderMap.js', versions: ['>=4.0.0'] }, apolloHeaderMapHook)
+addHook(
+  { name: '@apollo/server', file: 'dist/cjs/ApolloServer.js', versions: ['>=4.0.0 <5.0.0'] },
+  apolloServerHook
+)
+
+addHook(
+  { name: '@apollo/server', file: 'dist/cjs/express4/index.js', versions: ['>=4.0.0 <5.0.0'] },
+  apolloExpress4Hook
+)
+
+addHook(
+  { name: '@apollo/server', file: 'dist/cjs/utils/HeaderMap.js', versions: ['>=4.0.0 <5.0.0'] },
+  apolloHeaderMapHook
+)
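Note: all three hooks are now capped below @apollo/server v5, whose release reorganized the package layout these dist/cjs file paths rely on. A range check like the following (the semver call is only an illustration, not dd-trace's actual hook matcher) shows the effect of the new constraint:

    const semver = require('semver')
    semver.satisfies('4.12.0', '>=4.0.0 <5.0.0') // true — the CJS hooks apply
    semver.satisfies('5.0.0', '>=4.0.0 <5.0.0')  // false — v5 is left untouched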
package/packages/datadog-instrumentations/src/openai.js CHANGED
@@ -5,6 +5,7 @@ const shimmer = require('../../datadog-shimmer')
 
 const dc = require('dc-polyfill')
 const ch = dc.tracingChannel('apm:openai:request')
+const onStreamedChunkCh = dc.channel('apm:openai:request:chunk')
 
 const V4_PACKAGE_SHIMS = [
   {
@@ -160,79 +161,12 @@ addHook({ name: 'openai', file: 'dist/api.js', versions: ['>=3.0.0 <4'] }, expor
   return exports
 })
 
-function addStreamedChunk (content, chunk) {
-  content.usage = chunk.usage // add usage if it was specified to be returned
-  for (const choice of chunk.choices) {
-    const choiceIdx = choice.index
-    const oldChoice = content.choices.find(choice => choice?.index === choiceIdx)
-    if (oldChoice) {
-      if (!oldChoice.finish_reason) {
-        oldChoice.finish_reason = choice.finish_reason
-      }
-
-      // delta exists on chat completions
-      const delta = choice.delta
-
-      if (delta) {
-        const content = delta.content
-        if (content) {
-          if (oldChoice.delta.content) { // we don't want to append to undefined
-            oldChoice.delta.content += content
-          } else {
-            oldChoice.delta.content = content
-          }
-        }
-      } else {
-        const text = choice.text
-        if (text) {
-          if (oldChoice.text) {
-            oldChoice.text += text
-          } else {
-            oldChoice.text = text
-          }
-        }
-      }
-
-      // tools only exist on chat completions
-      const tools = delta && choice.delta.tool_calls
-
-      if (tools) {
-        oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => {
-          const oldTool = oldChoice.delta.tool_calls?.[toolIdx]
-
-          if (oldTool) {
-            oldTool.function.arguments += newTool.function.arguments
-            return oldTool
-          }
-
-          return newTool
-        })
-      }
-    } else {
-      // we don't know which choices arrive in which order
-      content.choices[choiceIdx] = choice
-    }
-  }
-}
-
-function convertBufferstoObjects (chunks) {
-  return Buffer
-    .concat(chunks) // combine the buffers
-    .toString() // stringify
-    .split(/(?=data:)/) // split on "data:"
-    .map(chunk => chunk.replaceAll('\n', '').slice(6)) // remove newlines and 'data: ' from the front
-    .slice(0, -1) // remove the last [DONE] message
-    .map(JSON.parse) // parse all of the returned objects
-}
-
 /**
  * For streamed responses, we need to accumulate all of the content in
  * the chunks, and let the combined content be the final response.
  * This way, spans look the same as when not streamed.
  */
-function wrapStreamIterator (response, options, n, ctx) {
-  let processChunksAsBuffers = false
-  let chunks = []
+function wrapStreamIterator (response, options, ctx) {
   return function (itr) {
     return function () {
       const iterator = itr.apply(this, arguments)
@@ -240,39 +174,11 @@ function wrapStreamIterator (response, options, n, ctx) {
        return next.apply(this, arguments)
          .then(res => {
            const { done, value: chunk } = res
-
-            if (chunk) {
-              chunks.push(chunk)
-              // TODO(BridgeAR): It likely depends on the options being passed
-              // through if the stream returns buffers or not. By reading that,
-              // we don't have to do the instanceof check anymore, which is
-              // relatively expensive.
-              if (chunk instanceof Buffer) {
-                // this operation should be safe
-                // if one chunk is a buffer (versus a plain object), the rest should be as well
-                processChunksAsBuffers = true
-              }
-            }
+            onStreamedChunkCh.publish({ ctx, chunk, done })
 
            if (done) {
-              let body = {}
-              if (processChunksAsBuffers) {
-                chunks = convertBufferstoObjects(chunks)
-              }
-
-              if (chunks.length) {
-                // Define the initial body having all the content outside of choices from the first chunk
-                // this will include import data like created, id, model, etc.
-                body = { ...chunks[0], choices: Array.from({ length: n }) }
-                // Start from the first chunk, and add its choices into the body
-                for (const chunk_ of chunks) {
-                  addStreamedChunk(body, chunk_)
-                }
-              }
-
              finish(ctx, {
                headers: response.headers,
-                data: body,
                request: {
                  path: response.url,
                  method: options.method
@@ -312,17 +218,6 @@ for (const extension of extensions) {
      // chat.completions and completions
      const stream = streamedResponse && getOption(arguments, 'stream', false)
 
-      // we need to compute how many prompts we are sending in streamed cases for completions
-      // not applicable for chat completiond
-      let n
-      if (stream) {
-        n = getOption(arguments, 'n', 1)
-        const prompt = getOption(arguments, 'prompt')
-        if (Array.isArray(prompt) && typeof prompt[0] !== 'number') {
-          n *= prompt.length
-        }
-      }
-
      const client = this._client || this.client
 
      const ctx = {
@@ -348,7 +243,7 @@ for (const extension of extensions) {
        const parsedPromise = origApiPromParse.apply(this, arguments)
          .then(body => Promise.all([this.responsePromise, body]))
 
-        return handleUnwrappedAPIPromise(parsedPromise, ctx, stream, n)
+        return handleUnwrappedAPIPromise(parsedPromise, ctx, stream)
      })
 
      return unwrappedPromise
@@ -361,7 +256,7 @@ for (const extension of extensions) {
        const parsedPromise = origApiPromParse.apply(this, arguments)
          .then(body => Promise.all([this.responsePromise, body]))
 
-        return handleUnwrappedAPIPromise(parsedPromise, ctx, stream, n)
+        return handleUnwrappedAPIPromise(parsedPromise, ctx, stream)
      })
 
      ch.end.publish(ctx)
@@ -375,15 +270,15 @@ for (const extension of extensions) {
  }
 }
 
-function handleUnwrappedAPIPromise (apiProm, ctx, stream, n) {
+function handleUnwrappedAPIPromise (apiProm, ctx, stream) {
  return apiProm
    .then(([{ response, options }, body]) => {
      if (stream) {
        if (body.iterator) {
-          shimmer.wrap(body, 'iterator', wrapStreamIterator(response, options, n, ctx))
+          shimmer.wrap(body, 'iterator', wrapStreamIterator(response, options, ctx))
        } else {
          shimmer.wrap(
-            body.response.body, Symbol.asyncIterator, wrapStreamIterator(response, options, n, ctx)
+            body.response.body, Symbol.asyncIterator, wrapStreamIterator(response, options, ctx)
          )
        }
      } else {
@@ -412,7 +307,11 @@ function finish (ctx, response, error) {
    ch.error.publish(ctx)
  }
 
-  ctx.result = response
+  // for successful streamed responses, we've already set the result on ctx.body,
+  // so we don't want to override it here
+  ctx.result ??= {}
+  Object.assign(ctx.result, response)
+
  ch.asyncEnd.publish(ctx)
 }
 
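Note: the instrumentation no longer assembles the streamed body itself; it publishes every iterator step on the new diagnostics channel, and the OpenAI plugin (see datadog-plugin-openai/src/tracing.js below) rebuilds the response. A minimal sketch of the channel contract (the subscriber body here is illustrative):

    const dc = require('dc-polyfill')
    const chunkCh = dc.channel('apm:openai:request:chunk')
    chunkCh.subscribe(({ ctx, chunk, done }) => {
      if (!ctx.chunks) ctx.chunks = [] // per-request state lives on ctx
      if (chunk) ctx.chunks.push(chunk)
      if (done) { /* combine ctx.chunks into the final body */ }
    })
    chunkCh.publish({ ctx: {}, chunk: { choices: [] }, done: false }) // one message per step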
package/packages/datadog-plugin-google-cloud-vertexai/src/tracing.js CHANGED
@@ -7,7 +7,6 @@ const makeUtilities = require('../../dd-trace/src/plugins/util/llm')
 
 const {
   extractModel,
-  extractSystemInstructions
 } = require('./utils')
 
 class GoogleCloudVertexAITracingPlugin extends TracingPlugin {
@@ -23,20 +22,18 @@ class GoogleCloudVertexAITracingPlugin extends TracingPlugin {
   }
 
   bindStart (ctx) {
-    const { instance, request, resource, stream } = ctx
+    const { instance, resource } = ctx
 
     const span = this.startSpan('vertexai.request', {
       service: this.config.service,
       resource,
       kind: 'client',
       meta: {
-        [MEASURED]: 1
+        [MEASURED]: 1,
+        'vertexai.request.model': extractModel(instance)
       }
     }, false)
 
-    const tags = this.tagRequest(request, instance, stream, span)
-    span.addTags(tags)
-
     const store = storage('legacy').getStore() || {}
     ctx.currentStore = { ...store, span }
 
@@ -47,157 +44,8 @@ class GoogleCloudVertexAITracingPlugin extends TracingPlugin {
     const span = ctx.currentStore?.span
     if (!span) return
 
-    const { result } = ctx
-
-    const response = result?.response
-    if (response) {
-      const tags = this.tagResponse(response, span)
-      span.addTags(tags)
-    }
-
     span.finish()
   }
-
-  /**
-   * Generate the request tags.
-   *
-   * @param {Object} request
-   * @param {Object} instance
-   * @param {boolean} stream
-   * @param {Span} span
-   * @returns {Object}
-   */
-  tagRequest (request, instance, stream, span) {
-    const model = extractModel(instance)
-    const tags = {
-      'vertexai.request.model': model
-    }
-
-    const history = instance.historyInternal
-
-    let contents = typeof request === 'string' || Array.isArray(request) ? request : request.contents
-    if (history) {
-      contents = [...history, ...(Array.isArray(contents) ? contents : [contents])]
-    }
-
-    const generationConfig = instance.generationConfig || {}
-    for (const key of Object.keys(generationConfig)) {
-      const transformedKey = key.replaceAll(/([a-z0-9])([A-Z])/g, '$1_$2').toLowerCase()
-      tags[`vertexai.request.generation_config.${transformedKey}`] = JSON.stringify(generationConfig[key])
-    }
-
-    if (stream) {
-      tags['vertexai.request.stream'] = true
-    }
-
-    if (!this.isPromptCompletionSampled(span)) return tags
-
-    const systemInstructions = extractSystemInstructions(instance)
-
-    for (const [idx, systemInstruction] of systemInstructions.entries()) {
-      tags[`vertexai.request.system_instruction.${idx}.text`] = systemInstruction
-    }
-
-    if (typeof contents === 'string') {
-      tags['vertexai.request.contents.0.text'] = contents
-      return tags
-    }
-
-    for (const [contentIdx, content] of contents.entries()) {
-      this.tagRequestContent(tags, content, contentIdx)
-    }
-
-    return tags
-  }
-
-  tagRequestPart (part, tags, partIdx, contentIdx) {
-    tags[`vertexai.request.contents.${contentIdx}.parts.${partIdx}.text`] = this.normalize(part.text)
-
-    const functionCall = part.functionCall
-    const functionResponse = part.functionResponse
-
-    if (functionCall) {
-      tags[`vertexai.request.contents.${contentIdx}.parts.${partIdx}.function_call.name`] = functionCall.name
-      tags[`vertexai.request.contents.${contentIdx}.parts.${partIdx}.function_call.args`] =
-        this.normalize(JSON.stringify(functionCall.args))
-    }
-    if (functionResponse) {
-      tags[`vertexai.request.contents.${contentIdx}.parts.${partIdx}.function_response.name`] =
-        functionResponse.name
-      tags[`vertexai.request.contents.${contentIdx}.parts.${partIdx}.function_response.response`] =
-        this.normalize(JSON.stringify(functionResponse.response))
-    }
-  }
-
-  tagRequestContent (tags, content, contentIdx) {
-    if (typeof content === 'string') {
-      tags[`vertexai.request.contents.${contentIdx}.text`] = this.normalize(content)
-      return
-    }
-
-    if (content.text || content.functionCall || content.functionResponse) {
-      this.tagRequestPart(content, tags, 0, contentIdx)
-      return
-    }
-
-    const { role, parts } = content
-    if (role) {
-      tags[`vertexai.request.contents.${contentIdx}.role`] = role
-    }
-
-    for (const [partIdx, part] of parts.entries()) {
-      this.tagRequestPart(part, tags, partIdx, contentIdx)
-    }
-  }
-
-  /**
-   * Generate the response tags.
-   *
-   * @param {Object} response
-   * @param {Span} span
-   * @returns {Object}
-   */
-  tagResponse (response, span) {
-    const tags = {}
-    const isSampled = this.isPromptCompletionSampled(span)
-
-    const candidates = response.candidates
-    for (const [candidateIdx, candidate] of candidates.entries()) {
-      const finishReason = candidate.finishReason
-      if (finishReason) {
-        tags[`vertexai.response.candidates.${candidateIdx}.finish_reason`] = finishReason
-      }
-      const candidateContent = candidate.content
-      const role = candidateContent.role
-      tags[`vertexai.response.candidates.${candidateIdx}.content.role`] = role
-
-      if (!isSampled) continue
-
-      const parts = candidateContent.parts
-      for (const [partIdx, part] of parts.entries()) {
-        const text = part.text
-        tags[`vertexai.response.candidates.${candidateIdx}.content.parts.${partIdx}.text`] =
-          this.normalize(String(text))
-
-        const functionCall = part.functionCall
-        if (!functionCall) continue
-
-        tags[`vertexai.response.candidates.${candidateIdx}.content.parts.${partIdx}.function_call.name`] =
-          functionCall.name
-        tags[`vertexai.response.candidates.${candidateIdx}.content.parts.${partIdx}.function_call.args`] =
-          this.normalize(JSON.stringify(functionCall.args))
-      }
-    }
-
-    const tokenCounts = response.usageMetadata
-    if (tokenCounts) {
-      tags['vertexai.response.usage.prompt_tokens'] = tokenCounts.promptTokenCount
-      tags['vertexai.response.usage.completion_tokens'] = tokenCounts.candidatesTokenCount
-      tags['vertexai.response.usage.total_tokens'] = tokenCounts.totalTokenCount
-    }
-
-    return tags
-  }
 }
 
 module.exports = GoogleCloudVertexAITracingPlugin
package/packages/datadog-plugin-openai/src/stream-helpers.js CHANGED
@@ -0,0 +1,114 @@
+'use strict'
+
+/**
+ * Combines legacy OpenAI streamed chunks into a single object.
+ * These legacy chunks are returned as buffers instead of individual objects.
+ * @param {readonly Uint8Array[]} chunks
+ * @returns {Array<Record<string, any>>}
+ */
+function convertBuffersToObjects (chunks) {
+  return Buffer
+    .concat(chunks) // combine the buffers
+    .toString() // stringify
+    .split(/(?=data:)/) // split on "data:"
+    .map(chunk => chunk.replaceAll('\n', '').slice(6)) // remove newlines and 'data: ' from the front
+    .slice(0, -1) // remove the last [DONE] message
+    .map(JSON.parse) // parse all of the returned objects
+}
+
+/**
+ * Common function for combining chunks with n choices into a single response body.
+ * The shared logic will add a new choice index entry if it doesn't exist, and otherwise
+ * hand off to a onChoice handler to add that choice to the previously stored choice.
+ *
+ * @param {Array<Record<string, any>>} chunks
+ * @param {number} n
+ * @param {function(Record<string, any>, Record<string, any>): void} onChoice
+ * @returns {Record<string, any>}
+ */
+function constructResponseFromStreamedChunks (chunks, n, onChoice) {
+  const body = { ...chunks[0], choices: Array.from({ length: n }) }
+
+  for (const chunk of chunks) {
+    body.usage = chunk.usage
+    for (const choice of chunk.choices) {
+      const choiceIdx = choice.index
+      const oldChoice = body.choices.find(choice => choice?.index === choiceIdx)
+
+      if (!oldChoice) {
+        body.choices[choiceIdx] = choice
+        continue
+      }
+
+      if (!oldChoice.finish_reason) {
+        oldChoice.finish_reason = choice.finish_reason
+      }
+
+      onChoice(choice, oldChoice)
+    }
+  }
+
+  return body
+}
+
+/**
+ * Constructs the entire response from a stream of OpenAI completion chunks,
+ * mainly combining the text choices of each chunk into a single string per choice.
+ * @param {Array<Record<string, any>>} chunks
+ * @param {number} n the number of choices to expect in the response
+ * @returns {Record<string, any>}
+ */
+function constructCompletionResponseFromStreamedChunks (chunks, n) {
+  return constructResponseFromStreamedChunks(chunks, n, (choice, oldChoice) => {
+    const text = choice.text
+    if (text) {
+      if (oldChoice.text) {
+        oldChoice.text += text
+      } else {
+        oldChoice.text = text
+      }
+    }
+  })
+}
+
+/**
+ * Constructs the entire response from a stream of OpenAI chat completion chunks,
+ * mainly combining the text choices of each chunk into a single string per choice.
+ * @param {Array<Record<string, any>>} chunks
+ * @param {number} n the number of choices to expect in the response
+ * @returns {Record<string, any>}
+ */
+function constructChatCompletionResponseFromStreamedChunks (chunks, n) {
+  return constructResponseFromStreamedChunks(chunks, n, (choice, oldChoice) => {
+    const delta = choice.delta
+    if (!delta) return
+
+    const content = delta.content
+    if (content) {
+      if (oldChoice.delta.content) {
+        oldChoice.delta.content += content
+      } else {
+        oldChoice.delta.content = content
+      }
+    }
+
+    const tools = delta.tool_calls
+    if (!tools) return
+
+    oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => {
+      const oldTool = oldChoice.delta.tool_calls?.[toolIdx]
+      if (oldTool) {
+        oldTool.function.arguments += newTool.function.arguments
+        return oldTool
+      }
+
+      return newTool
+    })
+  })
+}
+
+module.exports = {
+  convertBuffersToObjects,
+  constructCompletionResponseFromStreamedChunks,
+  constructChatCompletionResponseFromStreamedChunks
+}
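Note: a hypothetical usage sketch for the new helpers, merging two streamed chat chunks for a single choice (the chunk shapes are illustrative):

    const { constructChatCompletionResponseFromStreamedChunks } = require('./stream-helpers')

    const chunks = [
      { id: 'chatcmpl-1', choices: [{ index: 0, delta: { content: 'Hel' } }] },
      { id: 'chatcmpl-1', choices: [{ index: 0, delta: { content: 'lo' }, finish_reason: 'stop' }] }
    ]

    const body = constructChatCompletionResponseFromStreamedChunks(chunks, 1)
    // body.choices[0].delta.content === 'Hello'; body.choices[0].finish_reason === 'stop'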
package/packages/datadog-plugin-openai/src/tracing.js CHANGED
@@ -10,6 +10,11 @@ const { MEASURED } = require('../../../ext/tags')
 const { estimateTokens } = require('./token-estimator')
 
 const makeUtilities = require('../../dd-trace/src/plugins/util/llm')
+const {
+  convertBuffersToObjects,
+  constructCompletionResponseFromStreamedChunks,
+  constructChatCompletionResponseFromStreamedChunks
+} = require('./stream-helpers')
 
 let normalize
 
@@ -48,6 +53,39 @@ class OpenAiTracingPlugin extends TracingPlugin {
 
       normalize = utilities.normalize
    }
+
+    this.addSub('apm:openai:request:chunk', ({ ctx, chunk, done }) => {
+      if (!ctx.chunks) ctx.chunks = []
+
+      if (chunk) ctx.chunks.push(chunk)
+      if (!done) return
+
+      let chunks = ctx.chunks
+      if (chunks.length === 0) return
+
+      const firstChunk = chunks[0]
+      // OpenAI in legacy versions returns chunked buffers instead of objects.
+      // These buffers will need to be combined and coalesced into a list of object chunks.
+      if (firstChunk instanceof Buffer) {
+        chunks = convertBuffersToObjects(chunks)
+      }
+
+      const methodName = ctx.currentStore.normalizedMethodName
+      let n = 1
+      const prompt = ctx.args[0].prompt
+      if (Array.isArray(prompt) && typeof prompt[0] !== 'number') {
+        n *= prompt.length
+      }
+
+      let response = {}
+      if (methodName === 'createCompletion') {
+        response = constructCompletionResponseFromStreamedChunks(chunks, n)
+      } else if (methodName === 'createChatCompletion') {
+        response = constructChatCompletionResponseFromStreamedChunks(chunks, n)
+      }
+
+      ctx.result = { data: response }
+    })
  }
 
  configure (config) {
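Note: the subscriber above recomputes the expected choice count n from the request arguments. A small illustration of that rule (the prompt values are hypothetical):

    // completions: n is multiplied by the prompt count for arrays of strings
    let n = 1
    const prompt = ['p1', 'p2', 'p3'] // three prompts → three choices expected
    if (Array.isArray(prompt) && typeof prompt[0] !== 'number') n *= prompt.length
    // n === 3; for a single string or a token array like [1, 2, 3], n stays 1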
package/packages/dd-trace/src/appsec/reporter.js CHANGED
@@ -3,7 +3,6 @@
 const dc = require('dc-polyfill')
 const zlib = require('zlib')
 
-const Limiter = require('../rate_limiter')
 const { storage } = require('../../../datadog-core')
 const web = require('../plugins/util/web')
 const { ipHeaderList } = require('../plugins/util/ip_extractor')
@@ -15,7 +14,6 @@ const {
   updateWafRequestsMetricTags,
   updateRaspRequestsMetricTags,
   updateRaspRuleSkippedMetricTags,
-  updateRateLimitedMetric,
   getRequestMetrics
 } = require('./telemetry')
 const { keepTrace } = require('../priority_sampler')
@@ -31,9 +29,6 @@ const COLLECTED_REQUEST_BODY_MAX_ELEMENTS_PER_NODE = 256
 
 const telemetryLogCh = dc.channel('datadog:telemetry:log')
 
-// default limiter, configurable with setRateLimit()
-let limiter = new Limiter(100)
-
 const config = {
   headersExtendedCollectionEnabled: false,
   maxHeadersCollected: 0,
@@ -91,7 +86,6 @@ const NON_EXTENDED_REQUEST_HEADERS = new Set([...requestHeadersList, ...eventHea
 const NON_EXTENDED_RESPONSE_HEADERS = new Set(contentHeaderList)
 
 function init (_config) {
-  limiter = new Limiter(_config.rateLimit)
   config.headersExtendedCollectionEnabled = _config.extendedHeadersCollection.enabled
   config.maxHeadersCollected = _config.extendedHeadersCollection.maxHeaders
   config.headersRedaction = _config.extendedHeadersCollection.redaction
@@ -325,12 +319,6 @@ function reportAttack (attackData) {
     'appsec.event': 'true'
   }
 
-  if (limiter.isAllowed()) {
-    keepTrace(rootSpan, ASM)
-  } else {
-    updateRateLimitedMetric(req)
-  }
-
   // TODO: maybe add this to format.js later (to take decision as late as possible)
   if (!currentTags['_dd.origin']) {
     newTags['_dd.origin'] = 'appsec'
@@ -430,8 +418,8 @@ function isRaspAttack (events) {
   return events.some(e => e.rule?.tags?.module === 'rasp')
 }
 
-function isFingerprintAttribute (attribute) {
-  return attribute.startsWith('_dd.appsec.fp')
+function isSchemaAttribute (attribute) {
+  return attribute.startsWith('_dd.appsec.s.')
 }
 
 function reportAttributes (attributes) {
@@ -444,7 +432,7 @@ function reportAttributes (attributes) {
 
   const tags = {}
   for (let [tag, value] of Object.entries(attributes)) {
-    if (!isFingerprintAttribute(tag)) {
+    if (isSchemaAttribute(tag)) {
       const gzippedValue = zlib.gzipSync(JSON.stringify(value))
       value = gzippedValue.toString('base64')
     }
package/packages/dd-trace/src/appsec/waf/index.js CHANGED
@@ -3,6 +3,11 @@
 const { storage } = require('../../../../datadog-core')
 const log = require('../../log')
 const Reporter = require('../reporter')
+const Limiter = require('../../rate_limiter')
+const { keepTrace } = require('../../priority_sampler')
+const { ASM } = require('../../standalone/product')
+const web = require('../../plugins/util/web')
+const { updateRateLimitedMetric } = require('../telemetry')
 
 class WafUpdateError extends Error {
   constructor (diagnosticErrors) {
@@ -12,6 +17,8 @@ class WafUpdateError extends Error {
   }
 }
 
+let limiter = new Limiter(100)
+
 const waf = {
   wafManager: null,
   init,
@@ -27,6 +34,8 @@ const waf = {
 function init (rules, config) {
   destroy()
 
+  limiter = new Limiter(config.rateLimit)
+
   // dirty require to make startup faster for serverless
   const WAFManager = require('./waf_manager')
 
@@ -99,8 +108,18 @@ function run (data, req, raspRule) {
   }
 
   const wafContext = waf.wafManager.getWAFContext(req)
+  const result = wafContext.run(data, raspRule)
+
+  if (result?.keep) {
+    if (limiter.isAllowed()) {
+      const rootSpan = web.root(req)
+      keepTrace(rootSpan, ASM)
+    } else {
+      updateRateLimitedMetric(req)
+    }
+  }
 
-  return wafContext.run(data, raspRule)
+  return result
 }
 
 function disposeContext (req) {
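Note: trace-keep sampling for WAF events now lives here instead of in reporter.js, gated on the WAF result's keep flag. A rough sketch of the limiter's role (assuming, as the removed "default limiter" comment implied, that the rate_limiter budget is per second):

    const Limiter = require('../../rate_limiter')
    const limiter = new Limiter(100) // ~100 force-kept traces per second
    if (result?.keep) {
      if (limiter.isAllowed()) keepTrace(web.root(req), ASM) // within budget: force-keep
      else updateRateLimitedMetric(req) // over budget: count it instead
    }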
package/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js CHANGED
@@ -5,6 +5,7 @@ const { Worker, threadId: parentThreadId } = require('worker_threads')
 const { randomUUID } = require('crypto')
 const log = require('../../log')
 const { getEnvironmentVariables } = require('../../config-helper')
+const getDebuggerConfig = require('../../debugger/config')
 
 const probeIdToResolveBreakpointSet = new Map()
 const probeIdToResolveBreakpointRemove = new Map()
@@ -82,7 +83,7 @@ class TestVisDynamicInstrumentation {
         DD_INSTRUMENTATION_TELEMETRY_ENABLED: 'false'
       },
       workerData: {
-        config: this._config.serialize(),
+        config: getDebuggerConfig(this._config),
         parentThreadId,
         probePort: probeChannel.port1,
         configPort: configChannel.port1,
package/packages/dd-trace/src/config.js CHANGED
@@ -1546,22 +1546,6 @@ class Config {
       }
     }
   }
-
-  // TODO: Refactor the Config class so it never produces any config objects that are incompatible with MessageChannel
-  /**
-   * Serializes the config object so it can be passed over a Worker Thread MessageChannel.
-   * @returns {Object} The serialized config object.
-   */
-  serialize () {
-    // URL objects cannot be serialized over the MessageChannel, so we need to convert them to strings first
-    if (this.url instanceof URL) {
-      const config = { ...this }
-      config.url = this.url.toString()
-      return config
-    }
-
-    return this
-  }
 }
 
 function handleOtel (tagString) {
package/packages/dd-trace/src/debugger/config.js CHANGED
@@ -0,0 +1,16 @@
+'use strict'
+
+module.exports = function getDebuggerConfig (config) {
+  return {
+    commitSHA: config.commitSHA,
+    debug: config.debug,
+    dynamicInstrumentation: config.dynamicInstrumentation,
+    hostname: config.hostname,
+    logLevel: config.logLevel,
+    port: config.port,
+    repositoryUrl: config.repositoryUrl,
+    runtimeId: config.tags['runtime-id'],
+    service: config.service,
+    url: config.url?.toString(),
+  }
+}
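Note: url is exported as a string because WHATWG URL instances are not structured-cloneable, so posting them over a worker_threads MessageChannel throws — the reason the removed Config#serialize existed. A minimal repro sketch:

    const { MessageChannel } = require('node:worker_threads')
    const { port1, port2 } = new MessageChannel()
    port2.on('message', console.log)
    port1.postMessage(new URL('http://localhost:8126').toString()) // ok — plain string
    // port1.postMessage(new URL('http://localhost:8126')) // would throw DataCloneError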
package/packages/dd-trace/src/debugger/devtools_client/breakpoints.js CHANGED
@@ -12,7 +12,7 @@ const {
   breakpointToProbes,
   probeToLocation
 } = require('./state')
-const log = require('../../log')
+const log = require('./log')
 
 let sessionStarted = false
 const probes = new Map()
package/packages/dd-trace/src/debugger/devtools_client/config.js CHANGED
@@ -2,14 +2,10 @@
 
 const { workerData: { config: parentConfig, parentThreadId, configPort } } = require('node:worker_threads')
 const { format } = require('node:url')
-const log = require('../../log')
+const log = require('./log')
 
 const config = module.exports = {
-  dynamicInstrumentation: parentConfig.dynamicInstrumentation,
-  runtimeId: parentConfig.tags['runtime-id'],
-  service: parentConfig.service,
-  commitSHA: parentConfig.commitSHA,
-  repositoryUrl: parentConfig.repositoryUrl,
+  ...parentConfig,
   parentThreadId,
   maxTotalPayloadSize: 5 * 1024 * 1024 // 5MB
 }
package/packages/dd-trace/src/debugger/devtools_client/index.js CHANGED
@@ -9,7 +9,7 @@ const { getStackFromCallFrames } = require('./state')
 const { ackEmitting } = require('./status')
 const { parentThreadId } = require('./config')
 const { MAX_SNAPSHOTS_PER_SECOND_GLOBALLY } = require('./defaults')
-const log = require('../../log')
+const log = require('./log')
 const { version } = require('../../../../../package.json')
 const { NODE_MAJOR } = require('../../../../../version')
 
package/packages/dd-trace/src/debugger/devtools_client/log.js CHANGED
@@ -0,0 +1,19 @@
+'use strict'
+
+const { workerData } = require('node:worker_threads')
+
+// For testing purposes, we allow `workerData` to be undefined and fallback to a default config
+const { config: { debug, logLevel }, logPort } = workerData ?? { config: { debug: false } }
+
+const LEVELS = ['error', 'warn', 'info', 'debug']
+const on = (level, ...args) => {
+  if (typeof args[0] === 'function') {
+    args = [args[0]()]
+  }
+  logPort.postMessage({ level, args })
+}
+const off = () => {}
+
+for (const level of LEVELS) {
+  module.exports[level] = debug && LEVELS.indexOf(logLevel) >= LEVELS.indexOf(level) ? on.bind(null, level) : off
+}
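Note: a quick illustration of the gating above — a level is forwarded over logPort only when debug is on and it ranks at or above the configured logLevel in LEVELS order:

    const LEVELS = ['error', 'warn', 'info', 'debug']
    const enabled = (logLevel, level) => LEVELS.indexOf(logLevel) >= LEVELS.indexOf(level)
    enabled('warn', 'error') // true — forwarded to the parent thread
    enabled('warn', 'debug') // false — bound to the no-op off()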
package/packages/dd-trace/src/debugger/devtools_client/remote_config.js CHANGED
@@ -3,7 +3,7 @@
 const { workerData: { probePort } } = require('node:worker_threads')
 const { addBreakpoint, removeBreakpoint, modifyBreakpoint } = require('./breakpoints')
 const { ackReceived, ackInstalled, ackError } = require('./status')
-const log = require('../../log')
+const log = require('./log')
 
 // Example log line probe (simplified):
 // {
package/packages/dd-trace/src/debugger/devtools_client/send.js CHANGED
@@ -7,7 +7,7 @@ const config = require('./config')
 const JSONBuffer = require('./json-buffer')
 const request = require('../../exporters/common/request')
 const { GIT_COMMIT_SHA, GIT_REPOSITORY_URL } = require('../../plugins/util/tags')
-const log = require('../../log')
+const log = require('./log')
 const { version } = require('../../../../../package.json')
 const { getEnvironmentVariable } = require('../../config-helper')
 
package/packages/dd-trace/src/debugger/devtools_client/snapshot/index.js CHANGED
@@ -2,7 +2,7 @@
 
 const { getRuntimeObject } = require('./collector')
 const { processRawState } = require('./processor')
-const log = require('../../../log')
+const log = require('../log')
 
 const DEFAULT_MAX_REFERENCE_DEPTH = 3
 const DEFAULT_MAX_COLLECTION_SIZE = 100
package/packages/dd-trace/src/debugger/devtools_client/state.js CHANGED
@@ -4,7 +4,7 @@ const { join, dirname } = require('path')
 const { normalize } = require('source-map/lib/util')
 const { loadSourceMapSync } = require('./source-maps')
 const session = require('./session')
-const log = require('../../log')
+const log = require('./log')
 
 const WINDOWS_DRIVE_LETTER_REGEX = /[a-zA-Z]/
 
package/packages/dd-trace/src/debugger/devtools_client/status.js CHANGED
@@ -5,7 +5,7 @@ const config = require('./config')
 const JSONBuffer = require('./json-buffer')
 const request = require('../../exporters/common/request')
 const FormData = require('../../exporters/common/form-data')
-const log = require('../../log')
+const log = require('./log')
 
 module.exports = {
   ackReceived,
package/packages/dd-trace/src/debugger/index.js CHANGED
@@ -4,6 +4,7 @@ const { readFile } = require('fs')
 const { types } = require('util')
 const { join } = require('path')
 const { Worker, MessageChannel, threadId: parentThreadId } = require('worker_threads')
+const getDebuggerConfig = require('./config')
 const log = require('../log')
 
 let worker = null
@@ -25,6 +26,7 @@ function start (config, rc) {
 
   const rcAckCallbacks = new Map()
   const probeChannel = new MessageChannel()
+  const logChannel = new MessageChannel()
   configChannel = new MessageChannel()
 
   process[Symbol.for('datadog:node:util:types')] = types
@@ -54,18 +56,24 @@ function start (config, rc) {
   })
   probeChannel.port2.on('messageerror', (err) => log.error('[debugger] received "messageerror" on probe port', err))
 
+  logChannel.port2.on('message', ({ level, args }) => {
+    log[level](...args)
+  })
+  logChannel.port2.on('messageerror', (err) => log.error('[debugger] received "messageerror" on log port', err))
+
   worker = new Worker(
     join(__dirname, 'devtools_client', 'index.js'),
     {
       execArgv: [], // Avoid worker thread inheriting the `-r` command line argument
       env, // Avoid worker thread inheriting the `NODE_OPTIONS` environment variable (in case it contains `-r`)
       workerData: {
-        config: config.serialize(),
+        config: getDebuggerConfig(config),
        parentThreadId,
        probePort: probeChannel.port1,
+        logPort: logChannel.port1,
        configPort: configChannel.port1
      },
-      transferList: [probeChannel.port1, configChannel.port1]
+      transferList: [probeChannel.port1, logChannel.port1, configChannel.port1]
     }
   )
@@ -94,13 +102,15 @@ function start (config, rc) {
   worker.unref()
   probeChannel.port1.unref()
   probeChannel.port2.unref()
+  logChannel.port1.unref()
+  logChannel.port2.unref()
   configChannel.port1.unref()
   configChannel.port2.unref()
 }
 
 function configure (config) {
   if (configChannel === null) return
-  configChannel.port2.postMessage(config.serialize())
+  configChannel.port2.postMessage(getDebuggerConfig(config))
 }
 
 function readProbeFile (path, cb) {
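Note: a reduced sketch of the new log relay — the worker receives port1 (MessagePorts must be listed in transferList; they cannot be cloned) and the parent forwards each message to the main logger:

    const { Worker, MessageChannel } = require('node:worker_threads')
    const logChannel = new MessageChannel()
    logChannel.port2.on('message', ({ level, args }) => console[level](...args)) // stand-in for log[level]
    const worker = new Worker('./devtools_client/index.js', { // path is illustrative
      workerData: { logPort: logChannel.port1 },
      transferList: [logChannel.port1]
    })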
package/packages/dd-trace/src/plugins/util/ci.js CHANGED
@@ -27,7 +27,8 @@ const {
   GIT_COMMIT_COMMITTER_EMAIL,
   CI_NODE_LABELS,
   CI_NODE_NAME,
-  PR_NUMBER
+  PR_NUMBER,
+  CI_JOB_ID
 } = require('./tags')
 const { filterSensitiveInfoFromRepository } = require('./url')
 const { getEnvironmentVariable, getEnvironmentVariables } = require('../../config-helper')
@@ -210,7 +211,8 @@ module.exports = {
       [CI_NODE_LABELS]: CI_RUNNER_TAGS,
       [CI_NODE_NAME]: CI_RUNNER_ID,
       [GIT_PULL_REQUEST_BASE_BRANCH]: CI_MERGE_REQUEST_TARGET_BRANCH_NAME,
-      [PR_NUMBER]: CI_MERGE_REQUEST_IID
+      [PR_NUMBER]: CI_MERGE_REQUEST_IID,
+      [CI_JOB_ID]: GITLAB_CI_JOB_ID
     }
   }
 
@@ -247,7 +249,8 @@ module.exports = {
        CIRCLE_WORKFLOW_ID,
        CIRCLE_BUILD_NUM,
      }),
-      [PR_NUMBER]: CIRCLE_PR_NUMBER
+      [PR_NUMBER]: CIRCLE_PR_NUMBER,
+      [CI_JOB_ID]: CIRCLE_BUILD_NUM
     }
   }
 
@@ -298,7 +301,8 @@ module.exports = {
        GITHUB_REPOSITORY,
        GITHUB_RUN_ID,
        GITHUB_RUN_ATTEMPT
-      })
+      }),
+      [CI_JOB_ID]: GITHUB_JOB
     }
     if (GITHUB_BASE_REF) { // `pull_request` or `pull_request_target` event
       tags[GIT_PULL_REQUEST_BASE_BRANCH] = GITHUB_BASE_REF
@@ -407,7 +411,8 @@ module.exports = {
      [CI_JOB_NAME]: SYSTEM_JOBDISPLAYNAME,
      [CI_ENV_VARS]: JSON.stringify({ SYSTEM_TEAMPROJECTID, BUILD_BUILDID, SYSTEM_JOBID }),
      [PR_NUMBER]: SYSTEM_PULLREQUEST_PULLREQUESTNUMBER,
-      [GIT_PULL_REQUEST_BASE_BRANCH]: SYSTEM_PULLREQUEST_TARGETBRANCH
+      [GIT_PULL_REQUEST_BASE_BRANCH]: SYSTEM_PULLREQUEST_TARGETBRANCH,
+      [CI_JOB_ID]: SYSTEM_JOBID
     }
 
     if (SYSTEM_TEAMFOUNDATIONSERVERURI && SYSTEM_TEAMPROJECTID && BUILD_BUILDID) {
@@ -510,7 +515,8 @@ module.exports = {
      BUILDKITE_MESSAGE,
      BUILDKITE_AGENT_ID,
      BUILDKITE_PULL_REQUEST,
-      BUILDKITE_PULL_REQUEST_BASE_BRANCH
+      BUILDKITE_PULL_REQUEST_BASE_BRANCH,
+      BUILDKITE_CI_JOB_ID
     } = env
 
     const extraTags = Object.keys(env).filter(envVar =>
@@ -542,6 +548,7 @@ module.exports = {
      [CI_NODE_NAME]: BUILDKITE_AGENT_ID,
      [CI_NODE_LABELS]: JSON.stringify(extraTags),
      [PR_NUMBER]: BUILDKITE_PULL_REQUEST,
+      [CI_JOB_ID]: BUILDKITE_CI_JOB_ID
     }
 
     if (BUILDKITE_PULL_REQUEST) {
@@ -682,7 +689,8 @@ module.exports = {
        CODEBUILD_BUILD_ARN,
        DD_PIPELINE_EXECUTION_ID,
        DD_ACTION_EXECUTION_ID
-      })
+      }),
+      [CI_JOB_ID]: DD_ACTION_EXECUTION_ID
     }
   }
 
package/packages/dd-trace/src/plugins/util/git.js CHANGED
@@ -22,9 +22,9 @@ const {
   GIT_COMMIT_HEAD_AUTHOR_DATE,
   GIT_COMMIT_HEAD_AUTHOR_EMAIL,
   GIT_COMMIT_HEAD_AUTHOR_NAME,
-  GIT_COMMIT_HEAD_COMMITER_DATE,
-  GIT_COMMIT_HEAD_COMMITER_EMAIL,
-  GIT_COMMIT_HEAD_COMMITER_NAME,
+  GIT_COMMIT_HEAD_COMMITTER_DATE,
+  GIT_COMMIT_HEAD_COMMITTER_EMAIL,
+  GIT_COMMIT_HEAD_COMMITTER_NAME,
   GIT_COMMIT_HEAD_MESSAGE
 } = require('./tags')
 const {
@@ -503,9 +503,9 @@ function getGitMetadata (ciMetadata) {
     tags[GIT_COMMIT_HEAD_AUTHOR_DATE] = headAuthorDate
     tags[GIT_COMMIT_HEAD_AUTHOR_EMAIL] = headAuthorEmail
     tags[GIT_COMMIT_HEAD_AUTHOR_NAME] = headAuthorName
-    tags[GIT_COMMIT_HEAD_COMMITER_DATE] = headCommitterDate
-    tags[GIT_COMMIT_HEAD_COMMITER_EMAIL] = headCommitterEmail
-    tags[GIT_COMMIT_HEAD_COMMITER_NAME] = headCommitterName
+    tags[GIT_COMMIT_HEAD_COMMITTER_DATE] = headCommitterDate
+    tags[GIT_COMMIT_HEAD_COMMITTER_EMAIL] = headCommitterEmail
+    tags[GIT_COMMIT_HEAD_COMMITTER_NAME] = headCommitterName
   }
 
   const entries = [
package/packages/dd-trace/src/plugins/util/tags.js CHANGED
@@ -16,9 +16,9 @@ const GIT_COMMIT_HEAD_MESSAGE = 'git.commit.head.message'
 const GIT_COMMIT_HEAD_AUTHOR_DATE = 'git.commit.head.author.date'
 const GIT_COMMIT_HEAD_AUTHOR_EMAIL = 'git.commit.head.author.email'
 const GIT_COMMIT_HEAD_AUTHOR_NAME = 'git.commit.head.author.name'
-const GIT_COMMIT_HEAD_COMMITER_DATE = 'git.commit.head.commiter.date'
-const GIT_COMMIT_HEAD_COMMITER_EMAIL = 'git.commit.head.commiter.email'
-const GIT_COMMIT_HEAD_COMMITER_NAME = 'git.commit.head.commiter.name'
+const GIT_COMMIT_HEAD_COMMITTER_DATE = 'git.commit.head.committer.date'
+const GIT_COMMIT_HEAD_COMMITTER_EMAIL = 'git.commit.head.committer.email'
+const GIT_COMMIT_HEAD_COMMITTER_NAME = 'git.commit.head.committer.name'
 
 const GIT_PULL_REQUEST_BASE_BRANCH_SHA = 'git.pull_request.base_branch_sha'
 const GIT_PULL_REQUEST_BASE_BRANCH = 'git.pull_request.base_branch'
@@ -31,6 +31,7 @@ const CI_PROVIDER_NAME = 'ci.provider.name'
 const CI_WORKSPACE_PATH = 'ci.workspace_path'
 const CI_JOB_URL = 'ci.job.url'
 const CI_JOB_NAME = 'ci.job.name'
+const CI_JOB_ID = 'ci.job.id'
 const CI_STAGE_NAME = 'ci.stage.name'
 const CI_NODE_NAME = 'ci.node.name'
 const CI_NODE_LABELS = 'ci.node.labels'
@@ -56,9 +57,9 @@ module.exports = {
   GIT_COMMIT_HEAD_AUTHOR_DATE,
   GIT_COMMIT_HEAD_AUTHOR_EMAIL,
   GIT_COMMIT_HEAD_AUTHOR_NAME,
-  GIT_COMMIT_HEAD_COMMITER_DATE,
-  GIT_COMMIT_HEAD_COMMITER_EMAIL,
-  GIT_COMMIT_HEAD_COMMITER_NAME,
+  GIT_COMMIT_HEAD_COMMITTER_DATE,
+  GIT_COMMIT_HEAD_COMMITTER_EMAIL,
+  GIT_COMMIT_HEAD_COMMITTER_NAME,
   GIT_PULL_REQUEST_BASE_BRANCH_SHA,
   GIT_PULL_REQUEST_BASE_BRANCH,
   CI_PIPELINE_ID,
@@ -69,6 +70,7 @@ module.exports = {
   CI_WORKSPACE_PATH,
   CI_JOB_URL,
   CI_JOB_NAME,
+  CI_JOB_ID,
   CI_STAGE_NAME,
   CI_ENV_VARS,
   CI_NODE_NAME,
package/packages/dd-trace/src/remote_config/capabilities.js CHANGED
@@ -29,5 +29,6 @@ module.exports = {
   ASM_NETWORK_FINGERPRINT: 1n << 34n,
   ASM_HEADER_FINGERPRINT: 1n << 35n,
   ASM_RASP_CMDI: 1n << 37n,
-  ASM_DD_MULTICONFIG: 1n << 42n
+  ASM_DD_MULTICONFIG: 1n << 42n,
+  ASM_TRACE_TAGGING_RULES: 1n << 43n,
 }
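Note: remote-config capabilities form a bigint bitmask; this release claims bit 43. A small sketch of how such flags combine and test:

    const ASM_DD_MULTICONFIG = 1n << 42n
    const ASM_TRACE_TAGGING_RULES = 1n << 43n
    const mask = ASM_DD_MULTICONFIG | ASM_TRACE_TAGGING_RULES
    Boolean(mask & ASM_TRACE_TAGGING_RULES) // true — the capability is advertised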
package/packages/dd-trace/src/remote_config/index.js CHANGED
@@ -95,6 +95,7 @@ function enableWafUpdate (appsecConfig) {
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, true)
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true)
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true)
+  rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRACE_TAGGING_RULES, true)
 
   if (appsecConfig.rasp?.enabled) {
     rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, true)
@@ -132,6 +133,7 @@ function disableWafUpdate () {
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_NETWORK_FINGERPRINT, false)
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, false)
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, false)
+  rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRACE_TAGGING_RULES, false)
 
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, false)
   rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, false)