dd-trace 5.50.0 → 5.52.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE-3rdparty.csv +0 -6
- package/index.d.ts +35 -1
- package/package.json +3 -10
- package/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js +391 -0
- package/packages/datadog-instrumentations/src/cucumber.js +3 -0
- package/packages/datadog-instrumentations/src/dns.js +16 -14
- package/packages/datadog-instrumentations/src/fs.js +37 -46
- package/packages/datadog-instrumentations/src/helpers/hooks.js +1 -0
- package/packages/datadog-instrumentations/src/helpers/register.js +10 -1
- package/packages/datadog-instrumentations/src/jest.js +19 -4
- package/packages/datadog-instrumentations/src/mocha/utils.js +5 -0
- package/packages/datadog-instrumentations/src/net.js +24 -28
- package/packages/datadog-instrumentations/src/pg.js +19 -5
- package/packages/datadog-instrumentations/src/playwright.js +6 -0
- package/packages/datadog-instrumentations/src/vitest.js +18 -4
- package/packages/datadog-plugin-confluentinc-kafka-javascript/src/batch-consumer.js +11 -0
- package/packages/datadog-plugin-confluentinc-kafka-javascript/src/consumer.js +11 -0
- package/packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js +19 -0
- package/packages/datadog-plugin-confluentinc-kafka-javascript/src/producer.js +11 -0
- package/packages/datadog-plugin-cucumber/src/index.js +3 -0
- package/packages/datadog-plugin-cypress/src/cypress-plugin.js +3 -0
- package/packages/datadog-plugin-dns/src/lookup.js +10 -5
- package/packages/datadog-plugin-dns/src/lookup_service.js +6 -2
- package/packages/datadog-plugin-dns/src/resolve.js +5 -2
- package/packages/datadog-plugin-dns/src/reverse.js +6 -2
- package/packages/datadog-plugin-fs/src/index.js +9 -2
- package/packages/datadog-plugin-jest/src/index.js +3 -0
- package/packages/datadog-plugin-kafkajs/src/batch-consumer.js +2 -1
- package/packages/datadog-plugin-kafkajs/src/consumer.js +12 -21
- package/packages/datadog-plugin-kafkajs/src/producer.js +6 -2
- package/packages/datadog-plugin-kafkajs/src/utils.js +27 -0
- package/packages/datadog-plugin-langchain/src/index.js +0 -1
- package/packages/datadog-plugin-mocha/src/index.js +3 -0
- package/packages/datadog-plugin-net/src/ipc.js +6 -4
- package/packages/datadog-plugin-net/src/tcp.js +15 -9
- package/packages/datadog-plugin-pg/src/index.js +5 -1
- package/packages/datadog-plugin-playwright/src/index.js +3 -0
- package/packages/datadog-plugin-vitest/src/index.js +15 -3
- package/packages/dd-trace/src/appsec/api_security_sampler.js +20 -12
- package/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +14 -9
- package/packages/dd-trace/src/appsec/index.js +1 -1
- package/packages/dd-trace/src/baggage.js +36 -0
- package/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js +4 -2
- package/packages/dd-trace/src/config.js +13 -1
- package/packages/dd-trace/src/debugger/devtools_client/breakpoints.js +2 -0
- package/packages/dd-trace/src/debugger/devtools_client/state.js +1 -1
- package/packages/dd-trace/src/exporters/common/docker.js +13 -31
- package/packages/dd-trace/src/guardrails/telemetry.js +2 -5
- package/packages/dd-trace/src/llmobs/writers/base.js +33 -12
- package/packages/dd-trace/src/noop/proxy.js +5 -0
- package/packages/dd-trace/src/opentelemetry/context_manager.js +2 -0
- package/packages/dd-trace/src/opentracing/propagation/text_map.js +17 -9
- package/packages/dd-trace/src/plugin_manager.js +2 -0
- package/packages/dd-trace/src/plugins/index.js +3 -0
- package/packages/dd-trace/src/plugins/log_plugin.js +9 -20
- package/packages/dd-trace/src/plugins/outbound.js +11 -3
- package/packages/dd-trace/src/plugins/tracing.js +8 -4
- package/packages/dd-trace/src/plugins/util/test.js +1 -1
- package/packages/dd-trace/src/profiling/exporter_cli.js +1 -1
- package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js +1 -1
- package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js +1 -1
- package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js +2 -2
- package/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js +1 -1
- package/packages/dd-trace/src/profiling/profilers/event_plugins/event.js +15 -14
- package/packages/dd-trace/src/proxy.js +12 -0
- package/packages/dd-trace/src/service-naming/schemas/v0/messaging.js +8 -0
- package/packages/dd-trace/src/service-naming/schemas/v1/messaging.js +8 -0
- package/packages/dd-trace/src/standalone/product.js +3 -5
package/LICENSE-3rdparty.csv
CHANGED

@@ -38,15 +38,10 @@ dev,@eslint/eslintrc,MIT,Copyright OpenJS Foundation and other contributors, <www.openjsf.org>
 dev,@eslint/js,MIT,Copyright OpenJS Foundation and other contributors, <www.openjsf.org>
 dev,@msgpack/msgpack,ISC,Copyright 2019 The MessagePack Community
 dev,@stylistic/eslint-plugin-js,MIT,Copyright OpenJS Foundation and other contributors, <www.openjsf.org>
-dev,autocannon,MIT,Copyright 2016 Matteo Collina
 dev,axios,MIT,Copyright 2014-present Matt Zabriskie
 dev,benchmark,MIT,Copyright 2010-2016 Mathias Bynens Robert Kieffer John-David Dalton
 dev,body-parser,MIT,Copyright 2014 Jonathan Ong 2014-2015 Douglas Christopher Wilson
 dev,chai,MIT,Copyright 2017 Chai.js Assertion Library
-dev,chalk,MIT,Copyright Sindre Sorhus
-dev,checksum,MIT,Copyright Daniel D. Shaw
-dev,cli-table3,MIT,Copyright 2014 James Talmage
-dev,dotenv,BSD-2-Clause,Copyright 2015 Scott Motte
 dev,eslint,MIT,Copyright JS Foundation and other contributors https://js.foundation
 dev,eslint-config-standard,MIT,Copyright Feross Aboukhadijeh
 dev,eslint-plugin-import,MIT,Copyright 2015 Ben Mosher
@@ -60,7 +55,6 @@ dev,glob,ISC,Copyright Isaac Z. Schlueter and Contributors
 dev,globals,MIT,Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
 dev,graphql,MIT,Copyright 2015 Facebook Inc.
 dev,jszip,MIT,Copyright 2015-2016 Stuart Knightley and contributors
-dev,mkdirp,MIT,Copyright 2010 James Halliday
 dev,mocha,MIT,Copyright 2011-2018 JS Foundation and contributors https://js.foundation
 dev,multer,MIT,Copyright 2014 Hage Yaapa
 dev,nock,MIT,Copyright 2017 Pedro Teixeira and other contributors
package/index.d.ts
CHANGED

@@ -138,6 +138,22 @@ interface Tracer extends opentracing.Tracer {
   * LLM Observability SDK
   */
  llmobs: tracer.llmobs.LLMObs;
+
+  /**
+   * @experimental
+   * Provide same functionality as OpenTelemetry Baggage:
+   * https://opentelemetry.io/docs/concepts/signals/baggage/
+   *
+   * Since the equivalent of OTel Context is implicit in dd-trace-js,
+   * these APIs act on the currently active baggage
+   *
+   * Work with storage('baggage'), therefore do not follow the same continuity as other APIs
+   */
+  setBaggageItem (key: string, value: string): Record<string, string>;
+  getBaggageItem (key: string): string | undefined;
+  getAllBaggageItems (): Record<string, string>;
+  removeBaggageItem (key: string): Record<string, string>;
+  removeAllBaggageItems (): Record<string, string>;
 }
 
 // left out of the namespace, so it
@@ -154,6 +170,7 @@ interface Plugins {
   "bunyan": tracer.plugins.bunyan;
   "cassandra-driver": tracer.plugins.cassandra_driver;
   "child_process": tracer.plugins.child_process;
+  "confluentinc-kafka-javascript": tracer.plugins.confluentinc_kafka_javascript;
   "connect": tracer.plugins.connect;
   "couchbase": tracer.plugins.couchbase;
   "cucumber": tracer.plugins.cucumber;
@@ -717,7 +734,18 @@ declare namespace tracer {
       */
      maxDepth?: number,
    }
-  }
+    }
+
+    /**
+     * Configuration for Code Origin for Spans.
+     */
+    codeOriginForSpans?: {
+      /**
+       * Whether to enable Code Origin for Spans.
+       * @default true
+       */
+      enabled?: boolean
+    }
 
    /**
     * Configuration of the IAST. Can be a boolean as an alias to `iast.enabled`.
@@ -1333,6 +1361,12 @@ declare namespace tracer {
    * [child_process](https://nodejs.org/api/child_process.html) module.
    */
   interface child_process extends Instrumentation {}
+
+  /**
+   * This plugin automatically instruments the
+   * [confluentinc-kafka-javascript](https://github.com/confluentinc/confluent-kafka-js) module.
+   */
+  interface confluentinc_kafka_javascript extends Instrumentation {}
 
   /**
    * This plugin automatically instruments the
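Note: the baggage methods declared above act on the currently active baggage record rather than on a particular span. A minimal usage sketch based only on those type declarations (the key and value are illustrative):

'use strict'

const tracer = require('dd-trace').init()

// Setting an item returns the updated baggage record.
tracer.setBaggageItem('user.id', '12345')

// Items can be read individually or as a whole record.
tracer.getBaggageItem('user.id') // '12345'
tracer.getAllBaggageItems() // { 'user.id': '12345' }

// Removal also returns the remaining baggage record.
tracer.removeBaggageItem('user.id')
tracer.removeAllBaggageItems()

The new codeOriginForSpans option follows the usual TracerOptions shape, so opting out of the default would presumably look like tracer.init({ codeOriginForSpans: { enabled: false } }).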
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "dd-trace",
-  "version": "5.50.0",
+  "version": "5.52.0",
   "description": "Datadog APM tracing client for JavaScript",
   "main": "index.js",
   "typings": "index.d.ts",
@@ -54,7 +54,6 @@
     "test:integration:selenium": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/selenium/*.spec.js\"",
     "test:integration:vitest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/vitest/*.spec.js\"",
     "test:integration:profiler": "mocha --timeout 180000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/profiler/*.spec.js\"",
-    "test:integration:serverless": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/serverless/*.spec.js\"",
     "test:integration:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
     "test:unit:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
     "test:shimmer": "mocha 'packages/datadog-shimmer/test/**/*.spec.js'",
@@ -86,9 +85,9 @@
   "dependencies": {
     "@datadog/libdatadog": "^0.5.1",
     "@datadog/native-appsec": "8.5.2",
-    "@datadog/native-iast-taint-tracking": "
+    "@datadog/native-iast-taint-tracking": "4.0.0",
     "@datadog/native-metrics": "^3.1.1",
-    "@datadog/pprof": "5.
+    "@datadog/pprof": "5.8.0",
     "@datadog/sketches-js": "^2.1.0",
     "@datadog/wasm-js-rewriter": "4.0.1",
     "@isaacs/ttlcache": "^1.4.1",
@@ -125,15 +124,10 @@
     "@msgpack/msgpack": "^3.0.0-beta3",
     "@stylistic/eslint-plugin-js": "^3.0.1",
     "@types/node": "^16.0.0",
-    "autocannon": "^4.5.2",
     "axios": "^1.8.2",
     "benchmark": "^2.1.4",
     "body-parser": "^1.20.3",
     "chai": "^4.3.7",
-    "chalk": "^5.3.0",
-    "checksum": "^1.0.0",
-    "cli-table3": "^0.6.3",
-    "dotenv": "16.3.1",
     "eslint": "^9.19.0",
     "eslint-config-standard": "^17.1.0",
     "eslint-plugin-import": "^2.31.0",
@@ -147,7 +141,6 @@
     "globals": "^15.10.0",
     "graphql": "0.13.2",
     "jszip": "^3.5.0",
-    "mkdirp": "^3.0.1",
     "mocha": "^10",
     "multer": "^1.4.5-lts.1",
     "nock": "^11.3.3",

package/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js
ADDED

@@ -0,0 +1,391 @@
+'use strict'
+
+const {
+  addHook,
+  channel,
+  AsyncResource
+} = require('./helpers/instrument')
+const shimmer = require('../../datadog-shimmer')
+
+// Create channels for Confluent Kafka JavaScript
+const channels = {
+  producerStart: channel('apm:@confluentinc/kafka-javascript:produce:start'),
+  producerFinish: channel('apm:@confluentinc/kafka-javascript:produce:finish'),
+  producerError: channel('apm:@confluentinc/kafka-javascript:produce:error'),
+  producerCommit: channel('apm:@confluentinc/kafka-javascript:produce:commit'),
+  consumerStart: channel('apm:@confluentinc/kafka-javascript:consume:start'),
+  consumerFinish: channel('apm:@confluentinc/kafka-javascript:consume:finish'),
+  consumerError: channel('apm:@confluentinc/kafka-javascript:consume:error'),
+  consumerCommit: channel('apm:@confluentinc/kafka-javascript:consume:commit'),
+
+  // batch operations
+  batchConsumerStart: channel('apm:@confluentinc/kafka-javascript:consume-batch:start'),
+  batchConsumerFinish: channel('apm:@confluentinc/kafka-javascript:consume-batch:finish'),
+  batchConsumerError: channel('apm:@confluentinc/kafka-javascript:consume-batch:error'),
+  batchConsumerCommit: channel('apm:@confluentinc/kafka-javascript:consume-batch:commit')
+}
+
+// we need to store the offset per partition per topic for the consumer to track offsets for DSM
+const latestConsumerOffsets = new Map()
+
+// Customize the instrumentation for Confluent Kafka JavaScript
+addHook({ name: '@confluentinc/kafka-javascript', versions: ['>=1.0.0'] }, (module) => {
+  // Hook native module classes first
+  instrumentBaseModule(module)
+
+  // Then hook KafkaJS if it exists
+  if (module.KafkaJS) {
+    instrumentKafkaJS(module.KafkaJS)
+  }
+
+  return module
+})
+
+function instrumentBaseModule (module) {
+  // Helper function to wrap producer classes
+  function wrapProducerClass (ProducerClass, className) {
+    return shimmer.wrap(module, className, function wrapProducer (Original) {
+      return function wrappedProducer () {
+        const producer = new Original(...arguments)
+
+        // Hook the produce method
+        if (typeof producer?.produce === 'function') {
+          shimmer.wrap(producer, 'produce', function wrapProduce (produce) {
+            return function wrappedProduce (topic, partition, message, key, timestamp, opaque) {
+              if (!channels.producerStart.hasSubscribers) {
+                return produce.apply(this, arguments)
+              }
+
+              const brokers = this.globalConfig?.['bootstrap.servers']
+
+              const asyncResource = new AsyncResource('bound-anonymous-fn')
+              return asyncResource.runInAsyncScope(() => {
+                try {
+                  channels.producerStart.publish({
+                    topic,
+                    messages: [{ key, value: message }],
+                    bootstrapServers: brokers
+                  })
+
+                  const result = produce.apply(this, arguments)
+
+                  channels.producerCommit.publish(undefined)
+                  channels.producerFinish.publish(undefined)
+                  return result
+                } catch (error) {
+                  channels.producerError.publish(error)
+                  channels.producerFinish.publish(undefined)
+                  throw error
+                }
+              })
+            }
+          })
+        }
+
+        return producer
+      }
+    })
+  }
+
+  // Helper function to wrap consumer classes
+  function wrapConsumerClass (ConsumerClass, className) {
+    return shimmer.wrap(module, className, function wrapConsumer (Original) {
+      return function wrappedConsumer () {
+        const consumer = new Original(...arguments)
+        const groupId = this.groupId || (arguments[0]?.['group.id'])
+
+        // Wrap the consume method
+        if (typeof consumer?.consume === 'function') {
+          shimmer.wrap(consumer, 'consume', function wrapConsume (consume) {
+            return function wrappedConsume (numMessages, callback) {
+              if (!channels.consumerStart.hasSubscribers) {
+                return consume.apply(this, arguments)
+              }
+
+              if (!callback && typeof numMessages === 'function') {
+                callback = numMessages
+              }
+
+              // Handle callback-based consumption
+              if (typeof callback === 'function') {
+                return consume.call(this, numMessages, function wrappedCallback (err, messages) {
+                  if (messages && messages.length > 0) {
+                    messages.forEach(message => {
+                      channels.consumerStart.publish({
+                        topic: message?.topic,
+                        partition: message?.partition,
+                        message,
+                        groupId
+                      })
+                      updateLatestOffset(message?.topic, message?.partition, message?.offset, groupId)
+                    })
+                  }
+
+                  if (err) {
+                    channels.consumerError.publish(err)
+                  }
+
+                  try {
+                    const result = callback.apply(this, arguments)
+                    channels.consumerFinish.publish(undefined)
+                    return result
+                  } catch (error) {
+                    channels.consumerError.publish(error)
+                    channels.consumerFinish.publish(undefined)
+                    throw error
+                  }
+                })
+              }
+
+              // If no callback is provided, just pass through
+              return consume.apply(this, arguments)
+            }
+          })
+
+          // Wrap the commit method for handling offset commits
+          if (consumer && typeof consumer.commit === 'function') {
+            shimmer.wrap(consumer, 'commit', wrapCommit)
+          }
+        }
+
+        return consumer
+      }
+    })
+  }
+
+  // Wrap Producer and KafkaProducer classes if they exist
+  if (typeof module.Producer === 'function') {
+    wrapProducerClass(module.Producer, 'Producer')
+  }
+  if (typeof module.KafkaProducer === 'function') {
+    wrapProducerClass(module.KafkaProducer, 'KafkaProducer')
+  }
+
+  // Wrap Consumer and KafkaConsumer classes if they exist
+  if (typeof module.Consumer === 'function') {
+    wrapConsumerClass(module.Consumer, 'Consumer')
+  }
+  if (typeof module.KafkaConsumer === 'function') {
+    wrapConsumerClass(module.KafkaConsumer, 'KafkaConsumer')
+  }
+}
+
+function instrumentKafkaJS (kafkaJS) {
+  // Hook the Kafka class if it exists
+  if (typeof kafkaJS?.Kafka === 'function') {
+    shimmer.wrap(kafkaJS, 'Kafka', function wrapKafka (OriginalKafka) {
+      return function KafkaWrapper (options) {
+        const kafka = new OriginalKafka(options)
+        const kafkaJSOptions = options?.kafkaJS || options
+        const brokers = kafkaJSOptions?.brokers ? kafkaJSOptions.brokers.join(',') : ''
+
+        // Store brokers for later use
+        kafka._ddBrokers = brokers
+
+        // Wrap the producer method if it exists
+        if (typeof kafka?.producer === 'function') {
+          shimmer.wrap(kafka, 'producer', function wrapProducerMethod (producerMethod) {
+            return function wrappedProducerMethod () {
+              const producer = producerMethod.apply(this, arguments)
+
+              if (!brokers && arguments?.[0]?.['bootstrap.servers']) {
+                kafka._ddBrokers = arguments[0]['bootstrap.servers']
+              }
+
+              // Wrap the send method of the producer
+              if (producer && typeof producer.send === 'function') {
+                shimmer.wrap(producer, 'send', function wrapSend (send) {
+                  return function wrappedSend (payload) {
+                    if (!channels.producerStart.hasSubscribers) {
+                      return send.apply(this, arguments)
+                    }
+
+                    const asyncResource = new AsyncResource('bound-anonymous-fn')
+                    return asyncResource.runInAsyncScope(() => {
+                      try {
+                        channels.producerStart.publish({
+                          topic: payload?.topic,
+                          messages: payload?.messages || [],
+                          bootstrapServers: kafka._ddBrokers
+                        })
+
+                        const result = send.apply(this, arguments)
+
+                        result.then(
+                          asyncResource.bind(res => {
+                            channels.producerCommit.publish(res)
+                            channels.producerFinish.publish(undefined)
+                          }),
+                          asyncResource.bind(err => {
+                            if (err) {
+                              channels.producerError.publish(err)
+                            }
+                            channels.producerFinish.publish(undefined)
+                          })
+                        )
+
+                        return result
+                      } catch (e) {
+                        channels.producerError.publish(e)
+                        channels.producerFinish.publish(undefined)
+                        throw e
+                      }
+                    })
+                  }
+                })
+              }
+
+              return producer
+            }
+          })
+        }
+
+        // Wrap the consumer method if it exists
+        if (typeof kafka?.consumer === 'function') {
+          shimmer.wrap(kafka, 'consumer', function wrapConsumerMethod (consumerMethod) {
+            return function wrappedConsumerMethod (config) {
+              const consumer = consumerMethod.apply(this, arguments)
+              const groupId = getGroupId(config)
+
+              // Wrap the run method for handling message consumption
+              if (typeof consumer?.run === 'function') {
+                shimmer.wrap(consumer, 'run', function wrapRun (run) {
+                  return function wrappedRun (options) {
+                    if (!channels.consumerStart.hasSubscribers) {
+                      return run.apply(this, arguments)
+                    }
+
+                    const eachMessage = options?.eachMessage
+                    const eachBatch = options?.eachBatch
+                    if (eachMessage) {
+                      options.eachMessage = wrapKafkaCallback(
+                        eachMessage,
+                        {
+                          startCh: channels.consumerStart,
+                          commitCh: channels.consumerCommit,
+                          finishCh: channels.consumerFinish,
+                          errorCh: channels.consumerError
+                        },
+                        (payload) => {
+                          return {
+                            topic: payload?.topic,
+                            partition: payload?.partition,
+                            offset: payload?.message?.offset,
+                            message: payload?.message,
+                            groupId
+                          }
+                        })
+                    } else if (eachBatch) {
+                      options.eachBatch = wrapKafkaCallback(
+                        eachBatch,
+                        {
+                          startCh: channels.batchConsumerStart,
+                          commitCh: channels.batchConsumerCommit,
+                          finishCh: channels.batchConsumerFinish,
+                          errorCh: channels.batchConsumerError
+                        },
+                        (payload) => {
+                          const { batch } = payload
+                          return {
+                            topic: batch?.topic,
+                            partition: batch?.partition,
+                            offset: batch?.messages[batch?.messages?.length - 1]?.offset,
+                            messages: batch?.messages,
+                            groupId
+                          }
+                        }
+                      )
+                    }
+
+                    return run.apply(this, arguments)
+                  }
+                })
+              }
+
+              // Wrap the commit method for handling offset commits
+              if (typeof consumer?.commitOffsets === 'function') {
+                shimmer.wrap(consumer, 'commitOffsets', wrapCommit)
+              }
+
+              return consumer
+            }
+          })
+        }
+
+        return kafka
+      }
+    })
+  }
+}
+
+function wrapCommit (commit) {
+  return function wrappedCommit (options) {
+    if (!channels.consumerCommit.hasSubscribers) {
+      return commit.apply(this, arguments)
+    }
+
+    const result = commit.apply(this, arguments)
+    channels.consumerCommit.publish(getLatestOffsets())
+    latestConsumerOffsets.clear()
+    return result
+  }
+}
+
+function wrapKafkaCallback (callback, { startCh, commitCh, finishCh, errorCh }, getPayload) {
+  return function wrappedKafkaCallback (payload) {
+    const commitPayload = getPayload(payload)
+
+    const asyncResource = new AsyncResource('bound-anonymous-fn')
+    return asyncResource.runInAsyncScope(() => {
+      startCh.publish(commitPayload)
+
+      updateLatestOffset(commitPayload?.topic, commitPayload?.partition, commitPayload?.offset, commitPayload?.groupId)
+
+      try {
+        const result = callback.apply(this, arguments)
+
+        if (result && typeof result.then === 'function') {
+          return result
+            .then(asyncResource.bind(res => {
+              finishCh.publish(undefined)
+              return res
+            }))
+            .catch(asyncResource.bind(err => {
+              errorCh.publish(err)
+              finishCh.publish(undefined)
+              throw err
+            }))
+        } else {
+          finishCh.publish(undefined)
+          return result
+        }
+      } catch (error) {
+        errorCh.publish(error)
+        finishCh.publish(undefined)
+        throw error
+      }
+    })
+  }
+}
+
+function getGroupId (config) {
+  if (!config) return ''
+  if (config.kafkaJS?.groupId) return config.kafkaJS.groupId
+  if (config?.groupId) return config.groupId
+  if (config['group.id']) return config['group.id']
+  return ''
+}
+
+function updateLatestOffset (topic, partition, offset, groupId) {
+  const key = `${topic}:${partition}`
+  latestConsumerOffsets.set(key, {
+    topic,
+    partition,
+    offset,
+    groupId
+  })
+}
+
+function getLatestOffsets () {
+  return Array.from(latestConsumerOffsets.values())
+}
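Note: the instrumentation above only publishes to diagnostic channels; span creation lives in the datadog-plugin-confluentinc-kafka-javascript files listed earlier. As a sketch of the channel contract, a subscriber outside the tracer could observe produce calls like this (assuming the channel() helper registers names in Node's global diagnostics_channel registry, which is how the tracer's helpers behave):

'use strict'

const dc = require('node:diagnostics_channel')

// Payload shape mirrors what wrappedProduce/wrappedSend publish above:
// { topic, messages, bootstrapServers }
dc.subscribe('apm:@confluentinc/kafka-javascript:produce:start', (payload) => {
  console.log(`producing ${payload.messages.length} message(s) to ${payload.topic}`)
})

// Error channels receive the raw Error objects published above.
dc.subscribe('apm:@confluentinc/kafka-javascript:consume:error', (err) => {
  console.error('consume failed:', err)
})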
package/packages/datadog-instrumentations/src/cucumber.js
CHANGED

@@ -309,6 +309,7 @@ function wrapRun (pl, isLatestVersion) {
     let isAttemptToFixRetry = false
     let hasFailedAllRetries = false
     let hasPassedAllRetries = false
+    let hasFailedAttemptToFix = false
     let isDisabled = false
     let isQuarantined = false
 
@@ -330,6 +331,7 @@ function wrapRun (pl, isLatestVersion) {
         }, { pass: 0, fail: 0 })
         hasFailedAllRetries = fail === testManagementAttemptToFixRetries + 1
         hasPassedAllRetries = pass === testManagementAttemptToFixRetries + 1
+        hasFailedAttemptToFix = fail > 0
       }
     }
   }
@@ -360,6 +362,7 @@ function wrapRun (pl, isLatestVersion) {
       isAttemptToFixRetry,
       hasFailedAllRetries,
       hasPassedAllRetries,
+      hasFailedAttemptToFix,
       isDisabled,
       isQuarantined
     })
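Note: with attempt-to-fix retries a test runs testManagementAttemptToFixRetries + 1 times, so the new hasFailedAttemptToFix flag complements the existing all-or-nothing flags. A standalone sketch of that counting logic (the function name is hypothetical; the expressions mirror the diff):

// attempts: array of 'pass' | 'fail' outcomes for one attempt-to-fix test,
// length is retries + 1 (the original run plus each retry)
function attemptToFixFlags (attempts, retries) {
  const fail = attempts.filter(outcome => outcome === 'fail').length
  const pass = attempts.filter(outcome => outcome === 'pass').length
  return {
    hasFailedAllRetries: fail === retries + 1, // every run failed
    hasPassedAllRetries: pass === retries + 1, // every run passed
    hasFailedAttemptToFix: fail > 0 // new: at least one run failed
  }
}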
package/packages/datadog-instrumentations/src/dns.js
CHANGED

@@ -1,6 +1,6 @@
 'use strict'
 
-const { channel, addHook
+const { channel, addHook } = require('./helpers/instrument')
 const shimmer = require('../../datadog-shimmer')
 
 const rrtypes = {
@@ -53,7 +53,7 @@ function wrap (prefix, fn, expectedArgs, rrtype) {
   const errorCh = channel(prefix + ':error')
 
   const wrapped = function () {
-    const cb =
+    const cb = arguments[arguments.length - 1]
     if (
       !startCh.hasSubscribers ||
       arguments.length < expectedArgs ||
@@ -62,30 +62,32 @@ function wrap (prefix, fn, expectedArgs, rrtype) {
       return fn.apply(this, arguments)
     }
 
-    const
-
+    const args = Array.from(arguments)
+    args.pop() // gets rid of the callback
     if (rrtype) {
-
+      args.push(rrtype)
    }
 
-    const
-    return asyncResource.runInAsyncScope(() => {
-      startCh.publish(startArgs)
+    const ctx = { args }
 
-
+    return startCh.runStores(ctx, () => {
+      arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => function (error, result, ...args) {
        if (error) {
-
+          ctx.error = error
+          errorCh.publish(ctx)
        }
-
-
-
+
+        ctx.result = result
+        finishCh.runStores(ctx, cb, this, error, result, ...args)
+      })
 
      try {
        return fn.apply(this, arguments)
        // TODO deal with promise versions when we support `dns/promises`
      } catch (error) {
        error.stack // trigger getting the stack at the original throwing point
-
+        ctx.error = error
+        errorCh.publish(ctx)
 
        throw error
      }