dd-trace 5.26.0 → 5.27.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +7 -0
- package/package.json +1 -1
- package/packages/datadog-instrumentations/src/helpers/hooks.js +3 -0
- package/packages/datadog-instrumentations/src/langchain.js +77 -0
- package/packages/datadog-instrumentations/src/next.js +19 -7
- package/packages/datadog-plugin-amqplib/src/consumer.js +1 -2
- package/packages/datadog-plugin-aws-sdk/src/services/kinesis.js +1 -2
- package/packages/datadog-plugin-aws-sdk/src/services/sqs.js +1 -2
- package/packages/datadog-plugin-google-cloud-pubsub/src/consumer.js +1 -2
- package/packages/datadog-plugin-grpc/src/server.js +1 -2
- package/packages/datadog-plugin-jest/src/index.js +1 -2
- package/packages/datadog-plugin-kafkajs/src/consumer.js +1 -2
- package/packages/datadog-plugin-langchain/src/handlers/chain.js +50 -0
- package/packages/datadog-plugin-langchain/src/handlers/default.js +53 -0
- package/packages/datadog-plugin-langchain/src/handlers/embedding.js +63 -0
- package/packages/datadog-plugin-langchain/src/handlers/language_models/chat_model.js +99 -0
- package/packages/datadog-plugin-langchain/src/handlers/language_models/index.js +48 -0
- package/packages/datadog-plugin-langchain/src/handlers/language_models/llm.js +57 -0
- package/packages/datadog-plugin-langchain/src/index.js +89 -0
- package/packages/datadog-plugin-langchain/src/tokens.js +35 -0
- package/packages/datadog-plugin-moleculer/src/server.js +1 -2
- package/packages/datadog-plugin-rhea/src/consumer.js +1 -2
- package/packages/datadog-plugin-vitest/src/index.js +1 -2
- package/packages/dd-trace/src/appsec/sdk/utils.js +21 -2
- package/packages/dd-trace/src/config.js +8 -0
- package/packages/dd-trace/src/crashtracking/crashtracker.js +2 -2
- package/packages/dd-trace/src/plugins/index.js +3 -0
- package/packages/dd-trace/src/plugins/tracing.js +2 -2
- package/packages/dd-trace/src/plugins/util/web.js +1 -1
- package/packages/dd-trace/src/profiling/exporters/agent.js +42 -5
- package/packages/dd-trace/src/profiling/profiler.js +5 -2
package/index.d.ts
CHANGED
@@ -179,6 +179,7 @@ interface Plugins {
   "kafkajs": tracer.plugins.kafkajs
   "knex": tracer.plugins.knex;
   "koa": tracer.plugins.koa;
+  "langchain": tracer.plugins.langchain;
   "mariadb": tracer.plugins.mariadb;
   "memcached": tracer.plugins.memcached;
   "microgateway-core": tracer.plugins.microgateway_core;
@@ -1592,6 +1593,12 @@ declare namespace tracer {
    */
   interface kafkajs extends Instrumentation {}

+  /**
+   * This plugin automatically instruments the
+   * [langchain](https://js.langchain.com/) module
+   */
+  interface langchain extends Instrumentation {}
+
   /**
    * This plugin automatically instruments the
    * [ldapjs](https://github.com/ldapjs/node-ldapjs/) module.
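The new typings entry means the integration can be toggled like any other plugin. A minimal sketch, assuming the usual dd-trace bootstrap (only `enabled` is shown; the other options come from the shared `Instrumentation` interface):

// Hedged sketch: enable or disable the new langchain integration.
// Assumes dd-trace is initialized before '@langchain/core' is loaded.
const tracer = require('dd-trace').init()

// 'langchain' now exists on tracer.plugins, so it accepts the standard
// Instrumentation options (enabled, service, measured, ...).
tracer.use('langchain', { enabled: true })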
package/package.json
CHANGED
package/packages/datadog-instrumentations/src/helpers/hooks.js
CHANGED
@@ -19,6 +19,8 @@ module.exports = {
   '@jest/test-sequencer': () => require('../jest'),
   '@jest/transform': () => require('../jest'),
   '@koa/router': () => require('../koa'),
+  '@langchain/core': () => require('../langchain'),
+  '@langchain/openai': () => require('../langchain'),
   '@node-redis/client': () => require('../redis'),
   '@opensearch-project/opensearch': () => require('../opensearch'),
   '@opentelemetry/sdk-trace-node': () => require('../otel-sdk-trace'),
@@ -67,6 +69,7 @@ module.exports = {
   koa: () => require('../koa'),
   'koa-router': () => require('../koa'),
   kafkajs: () => require('../kafkajs'),
+  langchain: () => require('../langchain'),
   ldapjs: () => require('../ldapjs'),
   'limitd-client': () => require('../limitd-client'),
   lodash: () => require('../lodash'),
package/packages/datadog-instrumentations/src/langchain.js
ADDED
@@ -0,0 +1,77 @@
+'use strict'
+
+const { addHook } = require('./helpers/instrument')
+const shimmer = require('../../datadog-shimmer')
+
+const tracingChannel = require('dc-polyfill').tracingChannel
+
+const invokeTracingChannel = tracingChannel('apm:langchain:invoke')
+
+function wrapLangChainPromise (fn, type, namespace = []) {
+  return function () {
+    if (!invokeTracingChannel.start.hasSubscribers) {
+      return fn.apply(this, arguments)
+    }
+
+    // Runnable interfaces have an `lc_namespace` property
+    const ns = this.lc_namespace || namespace
+    const resource = [...ns, this.constructor.name].join('.')
+
+    const ctx = {
+      args: arguments,
+      instance: this,
+      type,
+      resource
+    }
+
+    return invokeTracingChannel.tracePromise(fn, ctx, this, ...arguments)
+  }
+}
+
+// langchain compiles into ESM and CommonJS, with ESM being the default and landing in the `.js` files
+// however, CommonJS ends up in `cjs` files, and are required under the hood with `.cjs` files
+// we patch each separately and explicitly to match against exports only once, and not rely on file regex matching
+const extensions = ['js', 'cjs']
+
+for (const extension of extensions) {
+  addHook({ name: '@langchain/core', file: `dist/runnables/base.${extension}`, versions: ['>=0.1'] }, exports => {
+    const RunnableSequence = exports.RunnableSequence
+    shimmer.wrap(RunnableSequence.prototype, 'invoke', invoke => wrapLangChainPromise(invoke, 'chain'))
+    shimmer.wrap(RunnableSequence.prototype, 'batch', batch => wrapLangChainPromise(batch, 'chain'))
+    return exports
+  })
+
+  addHook({
+    name: '@langchain/core',
+    file: `dist/language_models/chat_models.${extension}`,
+    versions: ['>=0.1']
+  }, exports => {
+    const BaseChatModel = exports.BaseChatModel
+    shimmer.wrap(
+      BaseChatModel.prototype,
+      'generate',
+      generate => wrapLangChainPromise(generate, 'chat_model')
+    )
+    return exports
+  })
+
+  addHook({ name: '@langchain/core', file: `dist/language_models/llms.${extension}`, versions: ['>=0.1'] }, exports => {
+    const BaseLLM = exports.BaseLLM
+    shimmer.wrap(BaseLLM.prototype, 'generate', generate => wrapLangChainPromise(generate, 'llm'))
+    return exports
+  })
+
+  addHook({ name: '@langchain/openai', file: `dist/embeddings.${extension}`, versions: ['>=0.1'] }, exports => {
+    const OpenAIEmbeddings = exports.OpenAIEmbeddings
+
+    // OpenAI (and Embeddings in general) do not define an lc_namespace
+    const namespace = ['langchain', 'embeddings', 'openai']
+    shimmer.wrap(OpenAIEmbeddings.prototype, 'embedDocuments', embedDocuments =>
+      wrapLangChainPromise(embedDocuments, 'embedding', namespace)
+    )
+    shimmer.wrap(OpenAIEmbeddings.prototype, 'embedQuery', embedQuery =>
+      wrapLangChainPromise(embedQuery, 'embedding', namespace)
+    )
+    return exports
+  })
+}
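For context on how the channel above is consumed: `tracePromise` publishes start/end/asyncStart/asyncEnd/error events on `apm:langchain:invoke`, and the plugin added later in this diff consumes them via its `tracing:apm:langchain:invoke` prefix. A standalone, hypothetical subscriber (not dd-trace's own wiring) would look like this:

// Hedged sketch of a subscriber to the channel published by wrapLangChainPromise,
// using the same dc-polyfill tracingChannel API.
const { tracingChannel } = require('dc-polyfill')

const ch = tracingChannel('apm:langchain:invoke')

ch.subscribe({
  start (ctx) {
    // ctx is the object built above: { args, instance, type, resource }
    console.log('langchain start:', ctx.type, ctx.resource)
  },
  asyncEnd (ctx) {
    // ctx.result holds the resolved value of the wrapped promise
    console.log('langchain end:', ctx.type)
  },
  error (ctx) {
    console.log('langchain error:', ctx.error)
  }
})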
package/packages/datadog-instrumentations/src/next.js
CHANGED
@@ -14,8 +14,14 @@ const queryParsedChannel = channel('apm:next:query-parsed')
 const requests = new WeakSet()
 const nodeNextRequestsToNextRequests = new WeakMap()

+// Next.js <= 14.2.6
 const MIDDLEWARE_HEADER = 'x-middleware-invoke'

+// Next.js >= 14.2.7
+const NEXT_REQUEST_META = Symbol.for('NextInternalRequestMeta')
+const META_IS_MIDDLEWARE = 'middlewareInvoke'
+const encounteredMiddleware = new WeakSet()
+
 function wrapHandleRequest (handleRequest) {
   return function (req, res, pathname, query) {
     return instrument(req, res, () => handleRequest.apply(this, arguments))
@@ -111,6 +117,11 @@ function getPageFromPath (page, dynamicRoutes = []) {
   return getPagePath(page)
 }

+function getRequestMeta (req, key) {
+  const meta = req[NEXT_REQUEST_META] || {}
+  return typeof key === 'string' ? meta[key] : meta
+}
+
 function instrument (req, res, error, handler) {
   if (typeof error === 'function') {
     handler = error
@@ -121,8 +132,9 @@ function instrument (req, res, error, handler) {
   res = res.originalResponse || res

   // TODO support middleware properly in the future?
-  const isMiddleware = req.headers[MIDDLEWARE_HEADER]
-  if (isMiddleware || requests.has(req)) {
+  const isMiddleware = req.headers[MIDDLEWARE_HEADER] || getRequestMeta(req, META_IS_MIDDLEWARE)
+  if ((isMiddleware && !encounteredMiddleware.has(req)) || requests.has(req)) {
+    encounteredMiddleware.add(req)
     if (error) {
       errorChannel.publish({ error })
     }
@@ -188,7 +200,7 @@ function finish (ctx, result, err) {
 // however, it is not provided as a class function or exported property
 addHook({
   name: 'next',
-  versions: ['>=13.3.0
+  versions: ['>=13.3.0'],
   file: 'dist/server/web/spec-extension/adapters/next-request.js'
 }, NextRequestAdapter => {
   shimmer.wrap(NextRequestAdapter.NextRequestAdapter, 'fromNodeNextRequest', fromNodeNextRequest => {
@@ -203,7 +215,7 @@ addHook({

 addHook({
   name: 'next',
-  versions: ['>=11.1
+  versions: ['>=11.1'],
   file: 'dist/server/serve-static.js'
 }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic))

@@ -213,7 +225,7 @@ addHook({
   file: 'dist/next-server/server/serve-static.js'
 }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic))

-addHook({ name: 'next', versions: ['>=11.1
+addHook({ name: 'next', versions: ['>=11.1'], file: 'dist/server/next-server.js' }, nextServer => {
   const Server = nextServer.default

   shimmer.wrap(Server.prototype, 'handleRequest', wrapHandleRequest)
@@ -230,7 +242,7 @@ addHook({ name: 'next', versions: ['>=11.1 <14.2.7'], file: 'dist/server/next-se
 })

 // `handleApiRequest` changes parameters/implementation at 13.2.0
-addHook({ name: 'next', versions: ['>=13.2
+addHook({ name: 'next', versions: ['>=13.2'], file: 'dist/server/next-server.js' }, nextServer => {
   const Server = nextServer.default
   shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequestWithMatch)
   return nextServer
@@ -264,7 +276,7 @@ addHook({

 addHook({
   name: 'next',
-  versions: ['>=13
+  versions: ['>=13'],
   file: 'dist/server/web/spec-extension/request.js'
 }, request => {
   const nextUrlDescriptor = Object.getOwnPropertyDescriptor(request.NextRequest.prototype, 'nextUrl')
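The `Symbol.for('NextInternalRequestMeta')` lookup works because `Symbol.for` goes through the global symbol registry, so the tracer can read the metadata Next.js >= 14.2.7 attaches to the request without importing any Next internals. A small illustration (the request object here is a hypothetical stand-in):

// Symbols created with Symbol.for share one registry entry per key, so two
// modules that never import each other still agree on the same property key.
const NEXT_REQUEST_META = Symbol.for('NextInternalRequestMeta')

// Hypothetical stand-in for what Next.js does internally:
const req = {}
req[Symbol.for('NextInternalRequestMeta')] = { middlewareInvoke: true }

// getRequestMeta(req, 'middlewareInvoke') above would then return:
console.log(req[NEXT_REQUEST_META].middlewareInvoke) // true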
package/packages/datadog-plugin-aws-sdk/src/services/kinesis.js
CHANGED
@@ -42,8 +42,7 @@ class Kinesis extends BaseAwsSdkPlugin {
           {},
           this.requestTags.get(request) || {},
           { 'span.kind': 'server' }
-        )
-        extractedLinks: responseExtraction.maybeChildOf._links
+        )
       }
       span = plugin.tracer.startSpan('aws.response', options)
       this.enter(span, store)
package/packages/datadog-plugin-aws-sdk/src/services/sqs.js
CHANGED
@@ -33,8 +33,7 @@ class Sqs extends BaseAwsSdkPlugin {
           {},
           this.requestTags.get(request) || {},
           { 'span.kind': 'server' }
-        )
-        extractedLinks: contextExtraction.datadogContext._links
+        )
       }
       parsedMessageAttributes = contextExtraction.parsedAttributes
       span = plugin.tracer.startSpan('aws.response', options)
package/packages/datadog-plugin-jest/src/index.js
CHANGED
@@ -219,8 +219,7 @@ class JestPlugin extends CiPlugin {
         [COMPONENT]: this.constructor.id,
         ...this.testEnvironmentMetadata,
         ...testSuiteMetadata
-      }
-      extractedLinks: testSessionSpanContext?._links
+      }
     })
     this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
     if (_ddTestCodeCoverageEnabled) {
package/packages/datadog-plugin-langchain/src/handlers/chain.js
ADDED
@@ -0,0 +1,50 @@
+'use strict'
+
+const LangChainHandler = require('./default')
+
+class LangChainChainHandler extends LangChainHandler {
+  getSpanStartTags (ctx) {
+    const tags = {}
+
+    if (!this.isPromptCompletionSampled()) return tags
+
+    let inputs = ctx.args?.[0]
+    inputs = Array.isArray(inputs) ? inputs : [inputs]
+
+    for (const idx in inputs) {
+      const input = inputs[idx]
+      if (typeof input !== 'object') {
+        tags[`langchain.request.inputs.${idx}`] = this.normalize(input)
+      } else {
+        for (const [key, value] of Object.entries(input)) {
+          // these are mappings to the python client names, ie lc_kwargs
+          // only present on BaseMessage types
+          if (key.includes('lc_')) continue
+          tags[`langchain.request.inputs.${idx}.${key}`] = this.normalize(value)
+        }
+      }
+    }
+
+    return tags
+  }
+
+  getSpanEndTags (ctx) {
+    const tags = {}
+
+    if (!this.isPromptCompletionSampled()) return tags
+
+    let outputs = ctx.result
+    outputs = Array.isArray(outputs) ? outputs : [outputs]
+
+    for (const idx in outputs) {
+      const output = outputs[idx]
+      tags[`langchain.response.outputs.${idx}`] = this.normalize(
+        typeof output === 'string' ? output : JSON.stringify(output)
+      )
+    }
+
+    return tags
+  }
+}
+
+module.exports = LangChainChainHandler
package/packages/datadog-plugin-langchain/src/handlers/default.js
ADDED
@@ -0,0 +1,53 @@
+'use strict'
+
+const Sampler = require('../../../dd-trace/src/sampler')
+
+const RE_NEWLINE = /\n/g
+const RE_TAB = /\t/g
+
+// TODO: should probably refactor the OpenAI integration to use a shared LLMTracingPlugin base class
+// This logic isn't particular to LangChain
+class LangChainHandler {
+  constructor (config) {
+    this.config = config
+    this.sampler = new Sampler(config.spanPromptCompletionSampleRate)
+  }
+
+  // no-op for default handler
+  getSpanStartTags (ctx) {}
+
+  // no-op for default handler
+  getSpanEndTags (ctx) {}
+
+  // no-op for default handler
+  extractApiKey (instance) {}
+
+  // no-op for default handler
+  extractProvider (instance) {}
+
+  // no-op for default handler
+  extractModel (instance) {}
+
+  normalize (text) {
+    if (!text) return
+    if (typeof text !== 'string' || !text || (typeof text === 'string' && text.length === 0)) return
+
+    const max = this.config.spanCharLimit
+
+    text = text
+      .replace(RE_NEWLINE, '\\n')
+      .replace(RE_TAB, '\\t')
+
+    if (text.length > max) {
+      return text.substring(0, max) + '...'
+    }
+
+    return text
+  }
+
+  isPromptCompletionSampled () {
+    return this.sampler.isSampled()
+  }
+}
+
+module.exports = LangChainHandler
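To make `normalize()` concrete, here is a standalone mirror of the same transformation, using the `spanCharLimit` default of 128 added to `config.js` later in this diff; it is a sketch of the logic, not the handler itself:

// Escape newlines/tabs, then truncate to spanCharLimit with a trailing ellipsis.
function normalize (text, spanCharLimit = 128) {
  if (typeof text !== 'string' || text.length === 0) return
  text = text.replace(/\n/g, '\\n').replace(/\t/g, '\\t')
  return text.length > spanCharLimit ? text.substring(0, spanCharLimit) + '...' : text
}

console.log(normalize('first line\nsecond line')) // 'first line\\nsecond line'
console.log(normalize('x'.repeat(300)).length)    // 131 (128 characters plus '...')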
package/packages/datadog-plugin-langchain/src/handlers/embedding.js
ADDED
@@ -0,0 +1,63 @@
+'use strict'
+
+const LangChainHandler = require('./default')
+
+class LangChainEmbeddingHandler extends LangChainHandler {
+  getSpanStartTags (ctx) {
+    const tags = {}
+
+    const inputTexts = ctx.args?.[0]
+
+    const sampled = this.isPromptCompletionSampled()
+    if (typeof inputTexts === 'string') {
+      // embed query
+      if (sampled) {
+        tags['langchain.request.inputs.0.text'] = this.normalize(inputTexts)
+      }
+      tags['langchain.request.input_counts'] = 1
+    } else {
+      // embed documents
+      if (sampled) {
+        for (const idx in inputTexts) {
+          const inputText = inputTexts[idx]
+          tags[`langchain.request.inputs.${idx}.text`] = this.normalize(inputText)
+        }
+      }
+      tags['langchain.request.input_counts'] = inputTexts.length
+    }
+
+    return tags
+  }
+
+  getSpanEndTags (ctx) {
+    const tags = {}
+
+    const { result } = ctx
+    if (!Array.isArray(result)) return
+
+    tags['langchain.response.outputs.embedding_length'] = (
+      Array.isArray(result[0]) ? result[0] : result
+    ).length
+
+    return tags
+  }
+
+  extractApiKey (instance) {
+    const apiKey = instance.clientConfig?.apiKey
+    if (!apiKey || apiKey.length < 4) return ''
+    return `...${apiKey.slice(-4)}`
+  }
+
+  extractProvider (instance) {
+    return instance.constructor.name.split('Embeddings')[0].toLowerCase()
+  }
+
+  extractModel (instance) {
+    for (const attr of ['model', 'modelName', 'modelId', 'modelKey', 'repoId']) {
+      const modelName = instance[attr]
+      if (modelName) return modelName
+    }
+  }
+}
+
+module.exports = LangChainEmbeddingHandler
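For a three-document `embedDocuments` call, the handler above produces tags along these lines (illustrative values, not captured from a real trace):

// Illustrative tag set for embedDocuments(['a', 'b', 'c']) when the
// prompt/completion sampler fires; 1536 is just an example vector dimension.
const exampleTags = {
  'langchain.request.inputs.0.text': 'a',
  'langchain.request.inputs.1.text': 'b',
  'langchain.request.inputs.2.text': 'c',
  'langchain.request.input_counts': 3,
  'langchain.response.outputs.embedding_length': 1536
}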
package/packages/datadog-plugin-langchain/src/handlers/language_models/chat_model.js
ADDED
@@ -0,0 +1,99 @@
+'use strict'
+
+const LangChainLanguageModelHandler = require('.')
+
+const COMPLETIONS = 'langchain.response.completions'
+
+class LangChainChatModelHandler extends LangChainLanguageModelHandler {
+  getSpanStartTags (ctx, provider) {
+    const tags = {}
+
+    const inputs = ctx.args?.[0]
+
+    for (const messageSetIndex in inputs) {
+      const messageSet = inputs[messageSetIndex]
+
+      for (const messageIndex in messageSet) {
+        const message = messageSet[messageIndex]
+        if (this.isPromptCompletionSampled()) {
+          tags[`langchain.request.messages.${messageSetIndex}.${messageIndex}.content`] =
+            this.normalize(message.content) || ''
+        }
+        tags[`langchain.request.messages.${messageSetIndex}.${messageIndex}.message_type`] = message.constructor.name
+      }
+    }
+
+    const instance = ctx.instance
+    const identifyingParams = (typeof instance._identifyingParams === 'function' && instance._identifyingParams()) || {}
+    for (const [param, val] of Object.entries(identifyingParams)) {
+      if (param.toLowerCase().includes('apikey') || param.toLowerCase().includes('apitoken')) continue
+      if (typeof val === 'object') {
+        for (const [key, value] of Object.entries(val)) {
+          tags[`langchain.request.${provider}.parameters.${param}.${key}`] = value
+        }
+      } else {
+        tags[`langchain.request.${provider}.parameters.${param}`] = val
+      }
+    }
+
+    return tags
+  }
+
+  getSpanEndTags (ctx) {
+    const { result } = ctx
+
+    const tags = {}
+
+    this.extractTokenMetrics(ctx.currentStore?.span, result)
+
+    for (const messageSetIdx in result.generations) {
+      const messageSet = result.generations[messageSetIdx]
+
+      for (const chatCompletionIdx in messageSet) {
+        const chatCompletion = messageSet[chatCompletionIdx]
+
+        const text = chatCompletion.text
+        const message = chatCompletion.message
+        let toolCalls = message.tool_calls
+
+        if (text && this.isPromptCompletionSampled()) {
+          tags[
+            `${COMPLETIONS}.${messageSetIdx}.${chatCompletionIdx}.content`
+          ] = this.normalize(text)
+        }
+
+        tags[
+          `${COMPLETIONS}.${messageSetIdx}.${chatCompletionIdx}.message_type`
+        ] = message.constructor.name
+
+        if (toolCalls) {
+          if (!Array.isArray(toolCalls)) {
+            toolCalls = [toolCalls]
+          }
+
+          for (const toolCallIndex in toolCalls) {
+            const toolCall = toolCalls[toolCallIndex]
+
+            tags[
+              `${COMPLETIONS}.${messageSetIdx}.${chatCompletionIdx}.tool_calls.${toolCallIndex}.id`
+            ] = toolCall.id
+            tags[
+              `${COMPLETIONS}.${messageSetIdx}.${chatCompletionIdx}.tool_calls.${toolCallIndex}.name`
+            ] = toolCall.name
+
+            const args = toolCall.args || {}
+            for (const [name, value] of Object.entries(args)) {
+              tags[
+                `${COMPLETIONS}.${messageSetIdx}.${chatCompletionIdx}.tool_calls.${toolCallIndex}.args.${name}`
+              ] = this.normalize(value)
+            }
+          }
+        }
+      }
+    }
+
+    return tags
+  }
+}
+
+module.exports = LangChainChatModelHandler
package/packages/datadog-plugin-langchain/src/handlers/language_models/index.js
ADDED
@@ -0,0 +1,48 @@
+'use strict'
+
+const { getTokensFromLlmOutput } = require('../../tokens')
+const LangChainHandler = require('../default')
+
+class LangChainLanguageModelHandler extends LangChainHandler {
+  extractApiKey (instance) {
+    const key = Object.keys(instance)
+      .find(key => {
+        const lower = key.toLowerCase()
+        return lower.includes('apikey') || lower.includes('apitoken')
+      })
+
+    let apiKey = instance[key]
+    if (apiKey?.secretValue && typeof apiKey.secretValue === 'function') {
+      apiKey = apiKey.secretValue()
+    }
+    if (!apiKey || apiKey.length < 4) return ''
+    return `...${apiKey.slice(-4)}`
+  }
+
+  extractProvider (instance) {
+    return typeof instance._llmType === 'function' && instance._llmType().split('-')[0]
+  }
+
+  extractModel (instance) {
+    for (const attr of ['model', 'modelName', 'modelId', 'modelKey', 'repoId']) {
+      const modelName = instance[attr]
+      if (modelName) return modelName
+    }
+  }
+
+  extractTokenMetrics (span, result) {
+    if (!span || !result) return
+
+    // we do not tag token metrics for non-openai providers
+    const provider = span.context()._tags['langchain.request.provider']
+    if (provider !== 'openai') return
+
+    const tokens = getTokensFromLlmOutput(result)
+
+    for (const [tokenKey, tokenCount] of Object.entries(tokens)) {
+      span.setTag(`langchain.tokens.${tokenKey}_tokens`, tokenCount)
+    }
+  }
+}
+
+module.exports = LangChainLanguageModelHandler
package/packages/datadog-plugin-langchain/src/handlers/language_models/llm.js
ADDED
@@ -0,0 +1,57 @@
+'use strict'
+
+const LangChainLanguageModelHandler = require('.')
+
+class LangChainLLMHandler extends LangChainLanguageModelHandler {
+  getSpanStartTags (ctx, provider) {
+    const tags = {}
+
+    const prompts = ctx.args?.[0]
+    for (const promptIdx in prompts) {
+      if (!this.isPromptCompletionSampled()) continue
+
+      const prompt = prompts[promptIdx]
+      tags[`langchain.request.prompts.${promptIdx}.content`] = this.normalize(prompt) || ''
+    }
+
+    const instance = ctx.instance
+    const identifyingParams = (typeof instance._identifyingParams === 'function' && instance._identifyingParams()) || {}
+    for (const [param, val] of Object.entries(identifyingParams)) {
+      if (param.toLowerCase().includes('apikey') || param.toLowerCase().includes('apitoken')) continue
+      if (typeof val === 'object') {
+        for (const [key, value] of Object.entries(val)) {
+          tags[`langchain.request.${provider}.parameters.${param}.${key}`] = value
+        }
+      } else {
+        tags[`langchain.request.${provider}.parameters.${param}`] = val
+      }
+    }
+
+    return tags
+  }
+
+  getSpanEndTags (ctx) {
+    const { result } = ctx
+
+    const tags = {}
+
+    this.extractTokenMetrics(ctx.currentStore?.span, result)
+
+    for (const completionIdx in result.generations) {
+      const completion = result.generations[completionIdx]
+      if (this.isPromptCompletionSampled()) {
+        tags[`langchain.response.completions.${completionIdx}.text`] = this.normalize(completion[0].text) || ''
+      }
+
+      if (completion && completion[0].generationInfo) {
+        const generationInfo = completion[0].generationInfo
+        tags[`langchain.response.completions.${completionIdx}.finish_reason`] = generationInfo.finishReason
+        tags[`langchain.response.completions.${completionIdx}.logprobs`] = generationInfo.logprobs
+      }
+    }
+
+    return tags
+  }
+}
+
+module.exports = LangChainLLMHandler
package/packages/datadog-plugin-langchain/src/index.js
ADDED
@@ -0,0 +1,89 @@
+'use strict'
+
+const { MEASURED } = require('../../../ext/tags')
+const { storage } = require('../../datadog-core')
+const TracingPlugin = require('../../dd-trace/src/plugins/tracing')
+
+const API_KEY = 'langchain.request.api_key'
+const MODEL = 'langchain.request.model'
+const PROVIDER = 'langchain.request.provider'
+const TYPE = 'langchain.request.type'
+
+const LangChainHandler = require('./handlers/default')
+const LangChainChatModelHandler = require('./handlers/language_models/chat_model')
+const LangChainLLMHandler = require('./handlers/language_models/llm')
+const LangChainChainHandler = require('./handlers/chain')
+const LangChainEmbeddingHandler = require('./handlers/embedding')
+
+class LangChainPlugin extends TracingPlugin {
+  static get id () { return 'langchain' }
+  static get operation () { return 'invoke' }
+  static get system () { return 'langchain' }
+  static get prefix () {
+    return 'tracing:apm:langchain:invoke'
+  }
+
+  constructor () {
+    super(...arguments)
+
+    const langchainConfig = this._tracerConfig.langchain || {}
+    this.handlers = {
+      chain: new LangChainChainHandler(langchainConfig),
+      chat_model: new LangChainChatModelHandler(langchainConfig),
+      llm: new LangChainLLMHandler(langchainConfig),
+      embedding: new LangChainEmbeddingHandler(langchainConfig),
+      default: new LangChainHandler(langchainConfig)
+    }
+  }
+
+  bindStart (ctx) {
+    const { resource, type } = ctx
+    const handler = this.handlers[type]
+
+    const instance = ctx.instance
+    const apiKey = handler.extractApiKey(instance)
+    const provider = handler.extractProvider(instance)
+    const model = handler.extractModel(instance)
+
+    const tags = handler.getSpanStartTags(ctx, provider) || []
+
+    if (apiKey) tags[API_KEY] = apiKey
+    if (provider) tags[PROVIDER] = provider
+    if (model) tags[MODEL] = model
+    if (type) tags[TYPE] = type
+
+    const span = this.startSpan('langchain.request', {
+      service: this.config.service,
+      resource,
+      kind: 'client',
+      meta: {
+        [MEASURED]: 1,
+        ...tags
+      }
+    }, false)
+
+    const store = storage.getStore() || {}
+    ctx.currentStore = { ...store, span }
+
+    return ctx.currentStore
+  }
+
+  asyncEnd (ctx) {
+    const span = ctx.currentStore.span
+
+    const { type } = ctx
+
+    const handler = this.handlers[type]
+    const tags = handler.getSpanEndTags(ctx) || {}
+
+    span.addTags(tags)
+
+    span.finish()
+  }
+
+  getHandler (type) {
+    return this.handlers[type] || this.handlers.default
+  }
+}
+
+module.exports = LangChainPlugin
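Putting the handlers together: for a chat-model invocation the plugin starts a `langchain.request` span whose resource comes from the instrumentation's `lc_namespace` join and whose meta merges the handler tags with the api key/provider/model/type tags set in `bindStart`. An illustrative (not captured) shape:

// Illustrative span fields for a BaseChatModel.generate() call; the exact
// resource and tag values depend on the model class and provider in use.
const exampleSpan = {
  name: 'langchain.request',
  resource: 'langchain.chat_models.openai.ChatOpenAI', // [...lc_namespace, constructor.name].join('.')
  meta: {
    'langchain.request.type': 'chat_model',
    'langchain.request.provider': 'openai',   // from _llmType().split('-')[0]
    'langchain.request.model': 'gpt-4o-mini', // hypothetical model name
    'langchain.request.api_key': '...abcd'    // last four characters only
  }
}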
package/packages/datadog-plugin-langchain/src/tokens.js
ADDED
@@ -0,0 +1,35 @@
+'use strict'
+
+function getTokensFromLlmOutput (result) {
+  const tokens = {
+    input: 0,
+    output: 0,
+    total: 0
+  }
+  const { llmOutput } = result
+  if (!llmOutput) return tokens
+
+  const tokenUsage = llmOutput.tokenUsage || llmOutput.usage_metadata || llmOutput.usage_metadata
+  if (!tokenUsage) return tokens
+
+  for (const tokenNames of [['input', 'prompt'], ['output', 'completion'], ['total']]) {
+    let token = 0
+    for (const tokenName of tokenNames) {
+      const underScore = `${tokenName}_tokens`
+      const camelCase = `${tokenName}Tokens`
+
+      token = tokenUsage[underScore] || tokenUsage[camelCase] || token
+    }
+
+    tokens[tokenNames[0]] = token
+  }
+
+  // assign total_tokens again in case it was improperly set the first time, or was not on tokenUsage
+  tokens.total = tokens.total || tokens.input + tokens.output
+
+  return tokens
+}
+
+module.exports = {
+  getTokensFromLlmOutput
+}
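A usage sketch for the token helper, with an `llmOutput` in the camelCase shape LangChain's OpenAI bindings commonly return (the field values are made up):

// The helper accepts snake_case or camelCase token fields and falls back to
// input + output when no total is present.
const { getTokensFromLlmOutput } = require('./tokens') // path within the plugin package

const result = {
  llmOutput: {
    tokenUsage: { promptTokens: 12, completionTokens: 7 } // no totalTokens on purpose
  }
}

console.log(getTokensFromLlmOutput(result)) // { input: 12, output: 7, total: 19 }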
package/packages/datadog-plugin-vitest/src/index.js
CHANGED
@@ -191,8 +191,7 @@ class VitestPlugin extends CiPlugin {
         [COMPONENT]: this.constructor.id,
         ...this.testEnvironmentMetadata,
         ...testSuiteMetadata
-      }
-      extractedLinks: testSessionSpanContext?._links
+      }
     })
     this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
     const store = storage.getStore()
package/packages/dd-trace/src/appsec/sdk/utils.js
CHANGED
@@ -1,8 +1,27 @@
 'use strict'

 function getRootSpan (tracer) {
-
-
+  let span = tracer.scope().active()
+  if (!span) return
+
+  const context = span.context()
+  const started = context._trace.started
+
+  let parentId = context._parentId
+  while (parentId) {
+    const parent = started.find(s => s.context()._spanId === parentId)
+    const pContext = parent?.context()
+
+    if (!pContext) break
+
+    parentId = pContext._parentId
+
+    if (!pContext._tags?._inferred_span) {
+      span = parent
+    }
+  }
+
+  return span
 }

 module.exports = {
package/packages/dd-trace/src/config.js
CHANGED
@@ -505,6 +505,8 @@ class Config {
     this._setValue(defaults, 'isGitUploadEnabled', false)
     this._setValue(defaults, 'isIntelligentTestRunnerEnabled', false)
     this._setValue(defaults, 'isManualApiEnabled', false)
+    this._setValue(defaults, 'langchain.spanCharLimit', 128)
+    this._setValue(defaults, 'langchain.spanPromptCompletionSampleRate', 1.0)
     this._setValue(defaults, 'llmobs.agentlessEnabled', false)
     this._setValue(defaults, 'llmobs.enabled', false)
     this._setValue(defaults, 'llmobs.mlApp', undefined)
@@ -615,6 +617,8 @@ class Config {
       DD_INSTRUMENTATION_TELEMETRY_ENABLED,
       DD_INSTRUMENTATION_CONFIG_ID,
       DD_LOGS_INJECTION,
+      DD_LANGCHAIN_SPAN_CHAR_LIMIT,
+      DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE,
       DD_LLMOBS_AGENTLESS_ENABLED,
       DD_LLMOBS_ENABLED,
       DD_LLMOBS_ML_APP,
@@ -771,6 +775,10 @@ class Config {
     this._setArray(env, 'injectionEnabled', DD_INJECTION_ENABLED)
     this._setBoolean(env, 'isAzureFunction', getIsAzureFunction())
     this._setBoolean(env, 'isGCPFunction', getIsGCPFunction())
+    this._setValue(env, 'langchain.spanCharLimit', maybeInt(DD_LANGCHAIN_SPAN_CHAR_LIMIT))
+    this._setValue(
+      env, 'langchain.spanPromptCompletionSampleRate', maybeFloat(DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE)
+    )
     this._setBoolean(env, 'legacyBaggageEnabled', DD_TRACE_LEGACY_BAGGAGE_ENABLED)
     this._setBoolean(env, 'llmobs.agentlessEnabled', DD_LLMOBS_AGENTLESS_ENABLED)
     this._setBoolean(env, 'llmobs.enabled', DD_LLMOBS_ENABLED)
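The two new settings are driven by environment variables; a hedged example of tuning them before the tracer reads its configuration (values are arbitrary):

// Set via the shell environment or very early in the entrypoint, before init().
process.env.DD_LANGCHAIN_SPAN_CHAR_LIMIT = '64' // truncate prompt/completion tags to 64 characters
process.env.DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = '0.5' // tag prompts/completions on ~half of spans

require('dd-trace').init()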
package/packages/dd-trace/src/crashtracking/crashtracker.js
CHANGED
@@ -53,14 +53,14 @@ class Crashtracker {
         // TODO: Use the string directly when deserialization is fixed.
         url: {
           scheme: url.protocol.slice(0, -1),
-          authority: url.protocol === 'unix'
+          authority: url.protocol === 'unix:'
             ? Buffer.from(url.pathname).toString('hex')
            : url.host,
           path_and_query: ''
         },
         timeout_ms: 3000
       },
-      timeout_ms:
+      timeout_ms: 5000,
       // TODO: Use `EnabledWithSymbolsInReceiver` instead for Linux when fixed.
       resolve_frames: 'EnabledWithInprocessSymbols'
     }
package/packages/dd-trace/src/plugins/index.js
CHANGED
@@ -15,6 +15,8 @@ module.exports = {
   get '@jest/test-sequencer' () { return require('../../../datadog-plugin-jest/src') },
   get '@jest/transform' () { return require('../../../datadog-plugin-jest/src') },
   get '@koa/router' () { return require('../../../datadog-plugin-koa/src') },
+  get '@langchain/core' () { return require('../../../datadog-plugin-langchain/src') },
+  get '@langchain/openai' () { return require('../../../datadog-plugin-langchain/src') },
   get '@node-redis/client' () { return require('../../../datadog-plugin-redis/src') },
   get '@opensearch-project/opensearch' () { return require('../../../datadog-plugin-opensearch/src') },
   get '@redis/client' () { return require('../../../datadog-plugin-redis/src') },
@@ -52,6 +54,7 @@ module.exports = {
   get koa () { return require('../../../datadog-plugin-koa/src') },
   get 'koa-router' () { return require('../../../datadog-plugin-koa/src') },
   get kafkajs () { return require('../../../datadog-plugin-kafkajs/src') },
+  get langchain () { return require('../../../datadog-plugin-langchain/src') },
   get mariadb () { return require('../../../datadog-plugin-mariadb/src') },
   get memcached () { return require('../../../datadog-plugin-memcached/src') },
   get 'microgateway-core' () { return require('../../../datadog-plugin-microgateway-core/src') },
package/packages/dd-trace/src/plugins/tracing.js
CHANGED
@@ -101,7 +101,7 @@ class TracingPlugin extends Plugin {
     }
   }

-  startSpan (name, { childOf, kind, meta, metrics, service, resource, type
+  startSpan (name, { childOf, kind, meta, metrics, service, resource, type } = {}, enter = true) {
     const store = storage.getStore()
     if (store && childOf === undefined) {
       childOf = store.span
@@ -119,7 +119,7 @@ class TracingPlugin extends Plugin {
         ...metrics
       },
       integrationName: type,
-      links:
+      links: childOf?._links
     })

     analyticsSampler.sample(span, this.config.measured)
package/packages/dd-trace/src/profiling/exporters/agent.js
CHANGED
@@ -13,15 +13,42 @@ const os = require('os')
 const { urlToHttpOptions } = require('url')
 const perf = require('perf_hooks').performance

+const telemetryMetrics = require('../../telemetry/metrics')
+const profilersNamespace = telemetryMetrics.manager.namespace('profilers')
+
 const containerId = docker.id()

+const statusCodeCounters = []
+const requestCounter = profilersNamespace.count('profile_api.requests', [])
+const sizeDistribution = profilersNamespace.distribution('profile_api.bytes', [])
+const durationDistribution = profilersNamespace.distribution('profile_api.ms', [])
+const statusCodeErrorCounter = profilersNamespace.count('profile_api.errors', ['type:status_code'])
+const networkErrorCounter = profilersNamespace.count('profile_api.errors', ['type:network'])
+// TODO: implement timeout error counter when we have a way to track timeouts
+// const timeoutErrorCounter = profilersNamespace.count('profile_api.errors', ['type:timeout'])
+
+function countStatusCode (statusCode) {
+  let counter = statusCodeCounters[statusCode]
+  if (counter === undefined) {
+    counter = statusCodeCounters[statusCode] = profilersNamespace.count(
+      'profile_api.responses', [`status_code:${statusCode}`]
+    )
+  }
+  counter.inc()
+}
+
 function sendRequest (options, form, callback) {
   const request = options.protocol === 'https:' ? httpsRequest : httpRequest

   const store = storage.getStore()
   storage.enterWith({ noop: true })
+  requestCounter.inc()
+  const start = perf.now()
   const req = request(options, res => {
+    durationDistribution.track(perf.now() - start)
+    countStatusCode(res.statusCode)
     if (res.statusCode >= 400) {
+      statusCodeErrorCounter.inc()
       const error = new Error(`HTTP Error ${res.statusCode}`)
       error.status = res.statusCode
       callback(error)
@@ -29,14 +56,24 @@ function sendRequest (options, form, callback) {
       callback(null, res)
     }
   })
-
-
+
+  req.on('error', (err) => {
+    networkErrorCounter.inc()
+    callback(err)
+  })
+  if (form) {
+    sizeDistribution.track(form.size())
+    form.pipe(req)
+  }
   storage.enterWith(store)
 }

 function getBody (stream, callback) {
   const chunks = []
-  stream.on('error',
+  stream.on('error', (err) => {
+    networkErrorCounter.inc()
+    callback(err)
+  })
   stream.on('data', chunk => chunks.push(chunk))
   stream.on('end', () => {
     callback(null, Buffer.concat(chunks))
@@ -198,7 +235,7 @@ class AgentExporter {
         if (err) {
           const { status } = err
           if ((typeof status !== 'number' || status >= 500 || status === 429) && operation.retry(err)) {
-            this._logger.
+            this._logger.warn(`Error from the agent: ${err.message}`)
           } else {
             reject(err)
           }
@@ -207,7 +244,7 @@ class AgentExporter {

         getBody(response, (err, body) => {
           if (err) {
-            this._logger.
+            this._logger.warn(`Error reading agent response: ${err.message}`)
           } else {
             this._logger.debug(() => {
               const bytes = (body.toString('hex').match(/../g) || []).join(' ')
package/packages/dd-trace/src/profiling/profiler.js
CHANGED
@@ -199,8 +199,11 @@ class Profiler extends EventEmitter {
     tags.snapshot = snapshotKind
     for (const exporter of this._config.exporters) {
       const task = exporter.export({ profiles, start, end, tags })
-        .catch(err =>
-
+        .catch(err => {
+          if (this._logger) {
+            this._logger.warn(err)
+          }
+        })
       tasks.push(task)
     }