dd-trace 5.13.0 → 5.14.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -1
- package/index.d.ts +7 -0
- package/init.js +27 -3
- package/package.json +3 -3
- package/packages/datadog-esbuild/index.js +1 -31
- package/packages/datadog-instrumentations/src/check_require_cache.js +41 -0
- package/packages/datadog-instrumentations/src/helpers/hooks.js +0 -1
- package/packages/datadog-instrumentations/src/helpers/register.js +7 -1
- package/packages/datadog-instrumentations/src/jest.js +0 -48
- package/packages/datadog-instrumentations/src/utils/src/extract-package-and-module-path.js +33 -0
- package/packages/dd-trace/src/appsec/iast/iast-log.js +2 -33
- package/packages/dd-trace/src/config.js +18 -8
- package/packages/dd-trace/src/encode/0.4.js +50 -3
- package/packages/dd-trace/src/plugins/index.js +0 -1
- package/packages/dd-trace/src/profiling/profiler.js +23 -7
- package/packages/dd-trace/src/proxy.js +7 -1
- package/packages/dd-trace/src/serverless.js +3 -5
- package/packages/dd-trace/src/telemetry/logs/log-collector.js +42 -1
package/README.md
CHANGED

@@ -108,4 +108,11 @@ If you would like to trace your bundled application then please read this page o
 
 ## Security Vulnerabilities
 
-Please refer to the [SECURITY.md](https://github.com/DataDog/dd-trace-js/blob/master/SECURITY.md) document if you have found a security issue.
+Please refer to the [SECURITY.md](https://github.com/DataDog/dd-trace-js/blob/master/SECURITY.md) document if you have found a security issue.
+
+## Datadog With OpenTelemetry
+
+Please refer to the [Node.js Custom Instrumentation using OpenTelemetry API](https://docs.datadoghq.com/tracing/trace_collection/custom_instrumentation/nodejs/otel/) document. It includes information on how to use the OpenTelemetry API with dd-trace-js.
+
+Note that our internal implementation of the OpenTelemetry API is currently set within the version range `>=1.0.0 <1.9.0`. This range will be updated at a regular cadence; therefore, we recommend updating your tracer to the latest release to ensure up-to-date support.
+
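For readers skimming this diff, a minimal sketch of the OpenTelemetry-API usage the new README section points to. The `TracerProvider` export and registration step follow the linked Datadog docs; treat names and details as illustrative rather than authoritative:

'use strict'

// Initialize dd-trace first, then route @opentelemetry/api calls through it.
const tracer = require('dd-trace').init()
const { TracerProvider } = tracer // assumption: exposed as described in the linked docs
new TracerProvider().register()

const { trace } = require('@opentelemetry/api') // must satisfy the supported range >=1.0.0 <1.9.0
const otelTracer = trace.getTracer('example-app')

otelTracer.startActiveSpan('process-order', (span) => {
  span.setAttribute('order.id', '1234')
  span.end()
})
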
package/index.d.ts
CHANGED

@@ -144,6 +144,7 @@ interface Plugins {
   "aws-sdk": tracer.plugins.aws_sdk;
   "bunyan": tracer.plugins.bunyan;
   "cassandra-driver": tracer.plugins.cassandra_driver;
+  "child_process": tracer.plugins.child_process;
   "connect": tracer.plugins.connect;
   "couchbase": tracer.plugins.couchbase;
   "cucumber": tracer.plugins.cucumber;

@@ -1207,6 +1208,12 @@ declare namespace tracer {
    */
   interface cassandra_driver extends Instrumentation {}
 
+  /**
+   * This plugin automatically instruments the
+   * [child_process](https://nodejs.org/api/child_process.html) module.
+   */
+  interface child_process extends Instrumentation {}
+
   /**
    * This plugin automatically instruments the
    * [connect](https://github.com/senchalabs/connect) module.

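Since the typings now advertise a `child_process` plugin, a hedged example of toggling it through the existing `tracer.use()` plugin API; option names beyond `enabled` are not shown in this diff and are not assumed here:

'use strict'

const tracer = require('dd-trace').init()

// Disable (or re-enable) the new child_process instrumentation like any other plugin.
tracer.use('child_process', { enabled: false })
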
package/init.js
CHANGED

@@ -1,7 +1,31 @@
 'use strict'
 
-const tracer = require('.')
+const path = require('path')
+const Module = require('module')
 
-tracer.init()
+let initBailout = false
 
-module.exports = tracer
+if (process.env.DD_INJECTION_ENABLED) {
+  // If we're running via single-step install, and we're not in the app's
+  // node_modules, then we should not initialize the tracer. This prevents
+  // single-step-installed tracer from clobbering the manually-installed tracer.
+  let resolvedInApp
+  const entrypoint = process.argv[1]
+  try {
+    resolvedInApp = Module.createRequire(entrypoint).resolve('dd-trace')
+  } catch (e) {
+    // Ignore. If we can't resolve the module, we assume it's not in the app.
+  }
+  if (resolvedInApp) {
+    const ourselves = path.join(__dirname, 'index.js')
+    if (ourselves !== resolvedInApp) {
+      initBailout = true
+    }
+  }
+}
+
+if (!initBailout) {
+  const tracer = require('.')
+  tracer.init()
+  module.exports = tracer
+}

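A small sketch of the resolution check the new bailout logic relies on; `Module.createRequire()` is standard Node.js, while the paths below are hypothetical:

'use strict'

const path = require('path')
const Module = require('module')

// Pretend the app entrypoint is /srv/app/server.js (process.argv[1] in the real code).
const entrypoint = '/srv/app/server.js'

let resolvedInApp
try {
  // Resolves to something like /srv/app/node_modules/dd-trace/index.js when the app
  // ships its own dd-trace; throws if the app has no dd-trace installed.
  resolvedInApp = Module.createRequire(entrypoint).resolve('dd-trace')
} catch (e) {
  resolvedInApp = undefined
}

// The injected copy bails out only when the app resolved a *different* dd-trace install.
const ourselves = path.join(__dirname, 'index.js')
const initBailout = Boolean(resolvedInApp) && resolvedInApp !== ourselves
console.log({ resolvedInApp, initBailout })
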
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "dd-trace",
-  "version": "5.13.0",
+  "version": "5.14.1",
   "description": "Datadog APM tracing client for JavaScript",
   "main": "index.js",
   "typings": "index.d.ts",

@@ -74,9 +74,9 @@
     "@datadog/native-iast-rewriter": "2.3.1",
     "@datadog/native-iast-taint-tracking": "2.1.0",
     "@datadog/native-metrics": "^2.0.0",
-    "@datadog/pprof": "5.
+    "@datadog/pprof": "5.3.0",
     "@datadog/sketches-js": "^2.1.0",
-    "@opentelemetry/api": "
+    "@opentelemetry/api": ">=1.0.0 <1.9.0",
     "@opentelemetry/core": "^1.14.0",
     "crypto-randomuuid": "^1.0.0",
     "dc-polyfill": "^0.1.4",

package/packages/datadog-esbuild/index.js
CHANGED

@@ -4,6 +4,7 @@
 
 const instrumentations = require('../datadog-instrumentations/src/helpers/instrumentations.js')
 const hooks = require('../datadog-instrumentations/src/helpers/hooks.js')
+const extractPackageAndModulePath = require('../datadog-instrumentations/src/utils/src/extract-package-and-module-path')
 
 for (const hook of Object.values(hooks)) {
   hook()

@@ -21,7 +22,6 @@ for (const instrumentation of Object.values(instrumentations)) {
   }
 }
 
-const NM = 'node_modules/'
 const INSTRUMENTED = Object.keys(instrumentations)
 const RAW_BUILTINS = require('module').builtinModules
 const CHANNEL = 'dd-trace:bundler:load'

@@ -181,33 +181,3 @@ function dotFriendlyResolve (path, directory) {
 
   return require.resolve(path, { paths: [directory] })
 }
-
-/**
- * For a given full path to a module,
- * return the package name it belongs to and the local path to the module
- * input: '/foo/node_modules/@co/stuff/foo/bar/baz.js'
- * output: { pkg: '@co/stuff', path: 'foo/bar/baz.js' }
- */
-function extractPackageAndModulePath (fullPath) {
-  const nm = fullPath.lastIndexOf(NM)
-  if (nm < 0) {
-    return { pkg: null, path: null }
-  }
-
-  const subPath = fullPath.substring(nm + NM.length)
-  const firstSlash = subPath.indexOf('/')
-
-  if (subPath[0] === '@') {
-    const secondSlash = subPath.substring(firstSlash + 1).indexOf('/')
-
-    return {
-      pkg: subPath.substring(0, firstSlash + 1 + secondSlash),
-      path: subPath.substring(firstSlash + 1 + secondSlash + 1)
-    }
-  }
-
-  return {
-    pkg: subPath.substring(0, firstSlash),
-    path: subPath.substring(firstSlash + 1)
-  }
-}

package/packages/datadog-instrumentations/src/check_require_cache.js
ADDED

@@ -0,0 +1,41 @@
+'use strict'
+
+/* eslint-disable no-console */
+
+const extractPackageAndModulePath = require('./utils/src/extract-package-and-module-path')
+
+/**
+ * The lowest hanging fruit to debug an app that isn't tracing
+ * properly is to check that it is loaded before any modules
+ * that need to be instrumented. This function checks the
+ * `require.cache` to see if any supported packages have
+ * already been required and prints a warning.
+ *
+ * Note that this only going to work for modules within npm
+ * packages, like `express`, and not internal modules, like
+ * `http`.
+ *
+ * The output isn't necessarily 100% perfect. For example if the
+ * app loads a package we instrument but outside of an
+ * unsupported version then a warning would still be displayed.
+ * This is OK as the tracer should be loaded earlier anyway.
+ */
+module.exports = function () {
+  const packages = require('../../datadog-instrumentations/src/helpers/hooks')
+  const naughties = new Set()
+  let didWarn = false
+
+  for (const pathToModule of Object.keys(require.cache)) {
+    const { pkg } = extractPackageAndModulePath(pathToModule)
+
+    if (naughties.has(pkg)) continue
+    if (!(pkg in packages)) continue
+
+    console.error(`Warning: Package '${pkg}' was loaded before dd-trace! This may break instrumentation.`)
+
+    naughties.add(pkg)
+    didWarn = true
+  }
+
+  if (didWarn) console.error('Warning: Please ensure dd-trace is loaded before other modules.')
+}

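To make the intent concrete, a hypothetical load order that would trip the new warning; the check walks `require.cache`, so only packages required before dd-trace are flagged, and it runs only when debug logging is enabled (see the register.js change below):

'use strict'

// Hypothetical app.js: an instrumented package is loaded before the tracer.
const express = require('express') // ends up in require.cache first

const tracer = require('dd-trace').init() // too late for the express require above

// With debug logging enabled, hook registration would now print something like:
//   Warning: Package 'express' was loaded before dd-trace! This may break instrumentation.
//   Warning: Please ensure dd-trace is loaded before other modules.

const app = express()
app.listen(3000)
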
package/packages/datadog-instrumentations/src/helpers/hooks.js
CHANGED

@@ -57,7 +57,6 @@ module.exports = {
   'jest-config': () => require('../jest'),
   'jest-environment-node': () => require('../jest'),
   'jest-environment-jsdom': () => require('../jest'),
-  'jest-jasmine2': () => require('../jest'),
   'jest-runtime': () => require('../jest'),
   'jest-worker': () => require('../jest'),
   knex: () => require('../knex'),

package/packages/datadog-instrumentations/src/helpers/register.js
CHANGED

@@ -6,8 +6,12 @@ const semver = require('semver')
 const Hook = require('./hook')
 const requirePackageJson = require('../../../dd-trace/src/require-package-json')
 const log = require('../../../dd-trace/src/log')
+const checkRequireCache = require('../check_require_cache')
 
-const {
+const {
+  DD_TRACE_DISABLED_INSTRUMENTATIONS = '',
+  DD_TRACE_DEBUG = ''
+} = process.env
 
 const hooks = require('./hooks')
 const instrumentations = require('./instrumentations')

@@ -27,6 +31,8 @@ if (!disabledInstrumentations.has('fetch')) {
 const HOOK_SYMBOL = Symbol('hookExportsMap')
 // TODO: make this more efficient
 
+if (DD_TRACE_DEBUG && DD_TRACE_DEBUG.toLowerCase() !== 'false') checkRequireCache()
+
 for (const packageName of names) {
   if (disabledInstrumentations.has(packageName)) continue
 

package/packages/datadog-instrumentations/src/jest.js
CHANGED

@@ -19,7 +19,6 @@ const {
   getJestTestName,
   getJestSuitesToRun
 } = require('../../datadog-plugin-jest/src/util')
-const { DD_MAJOR } = require('../../../version')
 
 const testSessionStartCh = channel('ci:jest:session:start')
 const testSessionFinishCh = channel('ci:jest:session:finish')

@@ -68,14 +67,6 @@ let hasFilteredSkippableSuites = false
 
 const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
 
-const specStatusToTestStatus = {
-  pending: 'skip',
-  disabled: 'skip',
-  todo: 'skip',
-  passed: 'pass',
-  failed: 'fail'
-}
-
 const asyncResources = new WeakMap()
 const originalTestFns = new WeakMap()
 const retriedTestsToNumAttempts = new Map()

@@ -837,45 +828,6 @@ addHook({
   versions: ['24.8.0 - 24.9.0']
 }, jestConfigSyncWrapper)
 
-function jasmineAsyncInstallWraper (jasmineAsyncInstallExport, jestVersion) {
-  log.warn('jest-jasmine2 support is removed from dd-trace@v4. Consider changing to jest-circus as `testRunner`.')
-  return function (globalConfig, globalInput) {
-    globalInput._ddtrace = global._ddtrace
-    shimmer.wrap(globalInput.jasmine.Spec.prototype, 'execute', execute => function (onComplete) {
-      const asyncResource = new AsyncResource('bound-anonymous-fn')
-      asyncResource.runInAsyncScope(() => {
-        const testSuite = getTestSuitePath(this.result.testPath, globalConfig.rootDir)
-        testStartCh.publish({
-          name: this.getFullName(),
-          suite: testSuite,
-          runner: 'jest-jasmine2',
-          frameworkVersion: jestVersion
-        })
-        const spec = this
-        const callback = asyncResource.bind(function () {
-          if (spec.result.failedExpectations && spec.result.failedExpectations.length) {
-            const formattedError = formatJestError(spec.result.failedExpectations[0].error)
-            testErrCh.publish(formattedError)
-          }
-          testRunFinishCh.publish({ status: specStatusToTestStatus[spec.result.status] })
-          onComplete.apply(this, arguments)
-        })
-        arguments[0] = callback
-        execute.apply(this, arguments)
-      })
-    })
-    return jasmineAsyncInstallExport.default(globalConfig, globalInput)
-  }
-}
-
-if (DD_MAJOR < 4) {
-  addHook({
-    name: 'jest-jasmine2',
-    versions: ['>=24.8.0'],
-    file: 'build/jasmineAsyncInstall.js'
-  }, jasmineAsyncInstallWraper)
-}
-
 const LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE = [
   'selenium-webdriver'
 ]

package/packages/datadog-instrumentations/src/utils/src/extract-package-and-module-path.js
ADDED

@@ -0,0 +1,33 @@
+'use strict'
+
+const NM = 'node_modules/'
+
+/**
+ * For a given full path to a module,
+ * return the package name it belongs to and the local path to the module
+ * input: '/foo/node_modules/@co/stuff/foo/bar/baz.js'
+ * output: { pkg: '@co/stuff', path: 'foo/bar/baz.js' }
+ */
+module.exports = function extractPackageAndModulePath (fullPath) {
+  const nm = fullPath.lastIndexOf(NM)
+  if (nm < 0) {
+    return { pkg: null, path: null }
+  }
+
+  const subPath = fullPath.substring(nm + NM.length)
+  const firstSlash = subPath.indexOf('/')
+
+  if (subPath[0] === '@') {
+    const secondSlash = subPath.substring(firstSlash + 1).indexOf('/')
+
+    return {
+      pkg: subPath.substring(0, firstSlash + 1 + secondSlash),
+      path: subPath.substring(firstSlash + 1 + secondSlash + 1)
+    }
+  }
+
+  return {
+    pkg: subPath.substring(0, firstSlash),
+    path: subPath.substring(firstSlash + 1)
+  }
+}

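A quick usage sketch of the newly extracted helper, using the input/output documented in its JSDoc plus a couple of assumed variations:

'use strict'

// Path is relative to packages/datadog-instrumentations/src; adjust if requiring from elsewhere.
const extractPackageAndModulePath = require('./utils/src/extract-package-and-module-path')

// Scoped package (example straight from the JSDoc above):
console.log(extractPackageAndModulePath('/foo/node_modules/@co/stuff/foo/bar/baz.js'))
// => { pkg: '@co/stuff', path: 'foo/bar/baz.js' }

// Unscoped package (assumed example):
console.log(extractPackageAndModulePath('/foo/node_modules/express/lib/router/index.js'))
// => { pkg: 'express', path: 'lib/router/index.js' }

// Not inside node_modules at all:
console.log(extractPackageAndModulePath('/foo/src/app.js'))
// => { pkg: null, path: null }
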
package/packages/dd-trace/src/appsec/iast/iast-log.js
CHANGED

@@ -2,35 +2,9 @@
 
 const dc = require('dc-polyfill')
 const log = require('../../log')
-const { calculateDDBasePath } = require('../../util')
 
 const telemetryLog = dc.channel('datadog:telemetry:log')
 
-const ddBasePath = calculateDDBasePath(__dirname)
-const EOL = '\n'
-const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm
-
-function sanitize (logEntry, stack) {
-  if (!stack) return logEntry
-
-  let stackLines = stack.split(EOL)
-
-  const firstIndex = stackLines.findIndex(l => l.match(STACK_FRAME_LINE_REGEX))
-
-  const isDDCode = firstIndex > -1 && stackLines[firstIndex].includes(ddBasePath)
-  stackLines = stackLines
-    .filter((line, index) => (isDDCode && index < firstIndex) || line.includes(ddBasePath))
-    .map(line => line.replace(ddBasePath, ''))
-
-  logEntry.stack_trace = stackLines.join(EOL)
-
-  if (!isDDCode) {
-    logEntry.message = 'omitted'
-  }
-
-  return logEntry
-}
-
 function getTelemetryLog (data, level) {
   try {
     data = typeof data === 'function' ? data() : data

@@ -42,18 +16,13 @@ function getTelemetryLog (data, level) {
       message = String(data.message || data)
     }
 
-    let logEntry = {
+    const logEntry = {
      message,
      level
    }
-
    if (data.stack) {
-      logEntry = sanitize(logEntry, data.stack)
-      if (logEntry.stack_trace === '') {
-        return
-      }
+      logEntry.stack_trace = data.stack
    }
-
    return logEntry
  } catch (e) {
    log.error(e)

package/packages/dd-trace/src/config.js
CHANGED

@@ -16,7 +16,7 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags')
 const { getGitMetadataFromGitProperties, removeUserSensitiveInfo } = require('./git_properties')
 const { updateConfig } = require('./telemetry')
 const telemetryMetrics = require('./telemetry/metrics')
-const { getIsGCPFunction,
+const { getIsGCPFunction, getIsAzureFunction } = require('./serverless')
 const { ORIGIN_KEY } = require('./constants')
 
 const tracerMetrics = telemetryMetrics.manager.namespace('tracers')

@@ -339,7 +339,7 @@
 
     // Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
     this.memcachedCommandEnabled = isTrue(DD_TRACE_MEMCACHED_COMMAND_ENABLED)
-    this.
+    this.isAzureFunction = getIsAzureFunction()
     this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG)
     this.installSignature = {
       id: DD_INSTRUMENTATION_INSTALL_ID,

@@ -417,8 +417,8 @@
   _isInServerlessEnvironment () {
     const inAWSLambda = process.env.AWS_LAMBDA_FUNCTION_NAME !== undefined
     const isGCPFunction = getIsGCPFunction()
-    const
-    return inAWSLambda || isGCPFunction ||
+    const isAzureFunction = getIsAzureFunction()
+    return inAWSLambda || isGCPFunction || isAzureFunction
   }
 
   // for _merge to work, every config value must have a default value

@@ -549,6 +549,7 @@
       DD_IAST_REDACTION_VALUE_PATTERN,
       DD_IAST_REQUEST_SAMPLING,
       DD_IAST_TELEMETRY_VERBOSITY,
+      DD_INJECTION_ENABLED,
       DD_INSTRUMENTATION_TELEMETRY_ENABLED,
       DD_INSTRUMENTATION_CONFIG_ID,
       DD_LOGS_INJECTION,

@@ -705,7 +706,14 @@
     this._setBoolean(env, 'telemetry.debug', DD_TELEMETRY_DEBUG)
     this._setBoolean(env, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)
     this._setValue(env, 'telemetry.heartbeatInterval', maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)))
-
+    const hasTelemetryLogsUsingFeatures =
+      isTrue(DD_IAST_ENABLED) ||
+      isTrue(DD_PROFILING_ENABLED) ||
+      (typeof DD_INJECTION_ENABLED === 'string' && DD_INJECTION_ENABLED.split(',').includes('profiling'))
+        ? true
+        : undefined
+    this._setBoolean(env, 'telemetry.logCollection', coalesce(DD_TELEMETRY_LOG_COLLECTION_ENABLED,
+      hasTelemetryLogsUsingFeatures))
     this._setBoolean(env, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED)
     this._setBoolean(env, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
     this._setBoolean(env, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)

@@ -786,8 +794,10 @@
     this._setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService)
     this._setBoolean(opts, 'startupLogs', options.startupLogs)
     this._setTags(opts, 'tags', tags)
-
-
+    const hasTelemetryLogsUsingFeatures =
+      (options.iastOptions && (options.iastOptions === true || options.iastOptions?.enabled === true)) ||
+      (options.profiling && options.profiling === true)
+    this._setBoolean(opts, 'telemetry.logCollection', hasTelemetryLogsUsingFeatures)
     this._setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled)
     this._setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled)
     this._setString(opts, 'version', options.version || tags.version)

@@ -867,7 +877,7 @@
     return coalesce(
       this.options.stats,
       process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,
-      getIsGCPFunction() ||
+      getIsGCPFunction() || getIsAzureFunction()
     )
   }
 
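A condensed sketch of the new default for `telemetry.logCollection` (a standalone rewrite of the logic above, with a simplified `isTrue` stand-in; the real Config class wires this through `_setBoolean`/`coalesce`):

'use strict'

// Simplified stand-in for the tracer's isTrue() helper.
const isTrue = (v) => ['true', '1'].includes(String(v).toLowerCase())

function defaultLogCollection (env) {
  const injectionEnablesProfiling =
    typeof env.DD_INJECTION_ENABLED === 'string' &&
    env.DD_INJECTION_ENABLED.split(',').includes('profiling')

  // Enabled whenever IAST or profiling is on, including profiling turned on via single-step
  // injection; otherwise left undefined so DD_TELEMETRY_LOG_COLLECTION_ENABLED (or the
  // library default) decides.
  return (isTrue(env.DD_IAST_ENABLED) || isTrue(env.DD_PROFILING_ENABLED) || injectionEnablesProfiling)
    ? true
    : undefined
}

console.log(defaultLogCollection({ DD_INJECTION_ENABLED: 'tracing,profiling' })) // true
console.log(defaultLogCollection({ DD_IAST_ENABLED: 'true' }))                   // true
console.log(defaultLogCollection({}))                                            // undefined
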
package/packages/dd-trace/src/encode/0.4.js
CHANGED

@@ -83,13 +83,17 @@ class AgentEncoder {
     span = formatSpan(span)
     bytes.reserve(1)
 
-    if (span.type) {
+    if (span.type && span.meta_struct) {
+      bytes.buffer[bytes.length++] = 0x8d
+    } else if (span.type || span.meta_struct) {
       bytes.buffer[bytes.length++] = 0x8c
+    } else {
+      bytes.buffer[bytes.length++] = 0x8b
+    }
 
+    if (span.type) {
       this._encodeString(bytes, 'type')
       this._encodeString(bytes, span.type)
-    } else {
-      bytes.buffer[bytes.length++] = 0x8b
     }
 
     this._encodeString(bytes, 'trace_id')

@@ -114,6 +118,10 @@
       this._encodeMap(bytes, span.meta)
       this._encodeString(bytes, 'metrics')
       this._encodeMap(bytes, span.metrics)
+      if (span.meta_struct) {
+        this._encodeString(bytes, 'meta_struct')
+        this._encodeObject(bytes, span.meta_struct)
+      }
     }
   }
 

@@ -263,6 +271,45 @@
     }
   }
 
+  _encodeObject (bytes, value, circularReferencesDetector = new Set()) {
+    circularReferencesDetector.add(value)
+    if (Array.isArray(value)) {
+      return this._encodeObjectAsArray(bytes, value, circularReferencesDetector)
+    } else if (value !== null && typeof value === 'object') {
+      return this._encodeObjectAsMap(bytes, value, circularReferencesDetector)
+    } else if (typeof value === 'string' || typeof value === 'number') {
+      this._encodeValue(bytes, value)
+    }
+  }
+
+  _encodeObjectAsMap (bytes, value, circularReferencesDetector) {
+    const keys = Object.keys(value)
+    const validKeys = keys.filter(key =>
+      typeof value[key] === 'string' ||
+      typeof value[key] === 'number' ||
+      (value[key] !== null && typeof value[key] === 'object' && !circularReferencesDetector.has(value[key])))
+
+    this._encodeMapPrefix(bytes, validKeys.length)
+
+    for (const key of validKeys) {
+      this._encodeString(bytes, key)
+      this._encodeObject(bytes, value[key], circularReferencesDetector)
+    }
+  }
+
+  _encodeObjectAsArray (bytes, value, circularReferencesDetector) {
+    const validValue = value.filter(item =>
+      typeof item === 'string' ||
+      typeof item === 'number' ||
+      (item !== null && typeof item === 'object' && !circularReferencesDetector.has(item)))
+
+    this._encodeArrayPrefix(bytes, validValue)
+
+    for (const item of validValue) {
+      this._encodeObject(bytes, item, circularReferencesDetector)
+    }
+  }
+
   _cacheString (value) {
     if (!(value in this._stringMap)) {
       this._stringCount++

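The magic bytes above are MessagePack fixmap headers: a map with up to 15 entries is encoded as a single byte `0x80 | size`. The span map has 11 fixed fields, plus one for `type` and one for `meta_struct` when present, hence 0x8b/0x8c/0x8d. A tiny illustration:

'use strict'

// MessagePack fixmap header for a map of `size` entries (valid for 0..15 entries).
function fixmapHeader (size) {
  if (size > 15) throw new RangeError('fixmap only covers maps with up to 15 entries')
  return 0x80 | size
}

console.log(fixmapHeader(11).toString(16)) // '8b' (neither type nor meta_struct set)
console.log(fixmapHeader(12).toString(16)) // '8c' (exactly one of them set)
console.log(fixmapHeader(13).toString(16)) // '8d' (both set)
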
package/packages/dd-trace/src/plugins/index.js
CHANGED

@@ -44,7 +44,6 @@ module.exports = {
   get 'jest-config' () { return require('../../../datadog-plugin-jest/src') },
   get 'jest-environment-node' () { return require('../../../datadog-plugin-jest/src') },
   get 'jest-environment-jsdom' () { return require('../../../datadog-plugin-jest/src') },
-  get 'jest-jasmine2' () { return require('../../../datadog-plugin-jest/src') },
   get 'jest-runtime' () { return require('../../../datadog-plugin-jest/src') },
   get 'jest-worker' () { return require('../../../datadog-plugin-jest/src') },
   get koa () { return require('../../../datadog-plugin-koa/src') },

package/packages/dd-trace/src/profiling/profiler.js
CHANGED

@@ -5,6 +5,7 @@ const { Config } = require('./config')
 const { snapshotKinds } = require('./constants')
 const { threadNamePrefix } = require('./profilers/shared')
 const dc = require('dc-polyfill')
+const telemetryLog = dc.channel('datadog:telemetry:log')
 
 const profileSubmittedChannel = dc.channel('datadog:profiling:profile-submitted')
 

@@ -15,6 +16,19 @@ function maybeSourceMap (sourceMap, SourceMapper, debug) {
   ], debug)
 }
 
+function logError (logger, err) {
+  if (logger) {
+    logger.error(err)
+  }
+  if (telemetryLog.hasSubscribers) {
+    telemetryLog.publish({
+      message: err.message,
+      level: 'ERROR',
+      stack_trace: err.stack
+    })
+  }
+}
+
 class Profiler extends EventEmitter {
   constructor () {
     super()

@@ -28,13 +42,15 @@ class Profiler extends EventEmitter {
 
   start (options) {
     return this._start(options).catch((err) => {
-      if (options.logger) {
-        options.logger.error(err)
-      }
+      logError(options.logger, err)
       return false
     })
   }
 
+  _logError (err) {
+    logError(this._logger, err)
+  }
+
   async _start (options) {
     if (this._enabled) return true
 

@@ -61,7 +77,7 @@
         })
       }
     } catch (err) {
-      this.
+      this._logError(err)
     }
 
     try {

@@ -78,7 +94,7 @@
       this._capture(this._timeoutInterval, start)
       return true
     } catch (e) {
-      this.
+      this._logError(e)
       this._stop()
       return false
     }

@@ -167,7 +183,7 @@
       profileSubmittedChannel.publish()
       this._logger.debug('Submitted profiles')
     } catch (err) {
-      this.
+      this._logError(err)
       this._stop()
     }
   }

@@ -182,7 +198,7 @@
     tags.snapshot = snapshotKind
     for (const exporter of this._config.exporters) {
       const task = exporter.export({ profiles, start, end, tags })
-        .catch(err => this.
+        .catch(err => this._logError(err))
 
       tasks.push(task)
     }

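Both the profiler and the proxy (next section) now publish errors on the `datadog:telemetry:log` diagnostics channel. A minimal sketch of observing that channel with `dc-polyfill`, which mirrors Node's `diagnostics_channel` API:

'use strict'

const dc = require('dc-polyfill')
const telemetryLog = dc.channel('datadog:telemetry:log')

// Subscribe the same way the telemetry log collector does internally.
telemetryLog.subscribe((payload) => {
  // Per the publishes in this diff, payload carries { message, level, stack_trace }.
  console.log(`[telemetry ${payload.level}] ${payload.message}`)
})

// Publishing guarded by hasSubscribers, as the new logError() helper does.
if (telemetryLog.hasSubscribers) {
  telemetryLog.publish({ message: 'example error', level: 'ERROR', stack_trace: new Error('boom').stack })
}
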
package/packages/dd-trace/src/proxy.js
CHANGED

@@ -14,6 +14,7 @@ const dogstatsd = require('./dogstatsd')
 const NoopDogStatsDClient = require('./noop/dogstatsd')
 const spanleak = require('./spanleak')
 const { SSITelemetry } = require('./profiling/ssi-telemetry')
+const telemetryLog = require('dc-polyfill').channel('datadog:telemetry:log')
 
 class LazyModule {
   constructor (provider) {

@@ -91,7 +92,7 @@ class Tracer extends NoopProxy {
         })
       }
 
-      if (config.isGCPFunction || config.
+      if (config.isGCPFunction || config.isAzureFunction) {
         require('./serverless').maybeStartServerlessMiniAgent(config)
       }
 

@@ -104,6 +105,11 @@
           this._profilerStarted = profiler.start(config)
         } catch (e) {
           log.error(e)
+          telemetryLog.publish({
+            message: e.message,
+            level: 'ERROR',
+            stack_trace: e.stack
+          })
         }
       } else if (ssiTelemetry.enabled()) {
         require('./profiling/ssi-telemetry-mock-profiler').start(config)

package/packages/dd-trace/src/serverless.js
CHANGED

@@ -53,18 +53,16 @@ function getIsGCPFunction () {
   return isDeprecatedGCPFunction || isNewerGCPFunction
 }
 
-function
+function getIsAzureFunction () {
   const isAzureFunction =
     process.env.FUNCTIONS_EXTENSION_VERSION !== undefined && process.env.FUNCTIONS_WORKER_RUNTIME !== undefined
-  const azureWebsiteSKU = process.env.WEBSITE_SKU
-  const isConsumptionPlan = azureWebsiteSKU === undefined || azureWebsiteSKU === 'Dynamic'
 
-  return isAzureFunction
+  return isAzureFunction
 }
 
 module.exports = {
   maybeStartServerlessMiniAgent,
   getIsGCPFunction,
-
+  getIsAzureFunction,
   getRustBinaryPath
 }

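With the consumption-plan check removed, any Azure Functions host is now treated as a serverless environment. A small sketch using hypothetical host-provided variables:

'use strict'

// Values of this shape are set by the Azure Functions host; these are hypothetical.
const env = {
  FUNCTIONS_EXTENSION_VERSION: '~4',
  FUNCTIONS_WORKER_RUNTIME: 'node',
  WEBSITE_SKU: 'ElasticPremium' // a non-consumption plan, which the old check would have rejected
}

const isAzureFunction =
  env.FUNCTIONS_EXTENSION_VERSION !== undefined && env.FUNCTIONS_WORKER_RUNTIME !== undefined

console.log(isAzureFunction) // true, detected regardless of plan
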
package/packages/dd-trace/src/telemetry/logs/log-collector.js
CHANGED

@@ -1,6 +1,7 @@
 'use strict'
 
 const log = require('../../log')
+const { calculateDDBasePath } = require('../../util')
 
 const logs = new Map()
 

@@ -29,6 +30,37 @@ function isValid (logEntry) {
   return logEntry?.level && logEntry.message
 }
 
+const ddBasePath = calculateDDBasePath(__dirname)
+const EOL = '\n'
+const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm
+
+function sanitize (logEntry) {
+  const stack = logEntry.stack_trace
+  if (!stack) return logEntry
+
+  let stackLines = stack.split(EOL)
+
+  const firstIndex = stackLines.findIndex(l => l.match(STACK_FRAME_LINE_REGEX))
+
+  const isDDCode = firstIndex > -1 && stackLines[firstIndex].includes(ddBasePath)
+  stackLines = stackLines
+    .filter((line, index) => (isDDCode && index < firstIndex) || line.includes(ddBasePath))
+    .map(line => line.replace(ddBasePath, ''))
+
+  logEntry.stack_trace = stackLines.join(EOL)
+  if (logEntry.stack_trace === '') {
+    // If entire stack was removed, we'd just have a message saying "omitted"
+    // in which case we'd rather not log it at all.
+    return null
+  }
+
+  if (!isDDCode) {
+    logEntry.message = 'omitted'
+  }
+
+  return logEntry
+}
+
 const logCollector = {
   add (logEntry) {
     try {

@@ -37,9 +69,13 @@
       // NOTE: should errors have higher priority? and discard log entries with lower priority?
      if (logs.size >= maxEntries) {
        overflowedCount++
-        return
+        return false
      }
 
+      logEntry = sanitize(logEntry)
+      if (!logEntry) {
+        return false
+      }
      const hash = createHash(logEntry)
      if (!logs.has(hash)) {
        logs.set(hash, logEntry)

@@ -51,6 +87,11 @@
     return false
   },
 
+  // Used for testing
+  hasEntry (logEntry) {
+    return logs.has(createHash(logEntry))
+  },
+
   drain () {
     if (logs.size === 0) return
 
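To illustrate the relocated `sanitize()` behavior, a hypothetical before/after for an entry added to the collector (the exact prefix stripped depends on `calculateDDBasePath`, so paths here are illustrative):

'use strict'

// Entry as published to the collector, with a mixed stack trace.
const before = {
  message: 'boom',
  level: 'ERROR',
  stack_trace: [
    'Error: boom',
    '    at collect (/srv/app/node_modules/dd-trace/packages/dd-trace/src/profiling/profiler.js:10:5)',
    '    at main (/srv/app/server.js:3:1)'
  ].join('\n')
}

// Roughly what sanitize() keeps: the first frame is dd-trace code, so the message survives,
// dd-trace frames are kept with the install path stripped, and app frames are dropped.
const after = {
  message: 'boom',
  level: 'ERROR',
  stack_trace: [
    'Error: boom',
    '    at collect (packages/dd-trace/src/profiling/profiler.js:10:5)'
  ].join('\n')
}

// Had no dd-trace frame survived, sanitize() would return null and the entry would be skipped;
// had the first frame been app code, the message would be replaced with 'omitted'.
console.log(before, after)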