dd-trace 2.12.1 → 2.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/LICENSE-3rdparty.csv +1 -1
  2. package/ext/formats.js +3 -5
  3. package/package.json +5 -5
  4. package/packages/datadog-core/src/storage/async_resource.js +19 -1
  5. package/packages/datadog-instrumentations/index.js +1 -52
  6. package/packages/datadog-instrumentations/src/connect.js +1 -1
  7. package/packages/datadog-instrumentations/src/cucumber.js +15 -0
  8. package/packages/datadog-instrumentations/src/fs.js +11 -0
  9. package/packages/datadog-instrumentations/src/helpers/hooks.js +69 -0
  10. package/packages/datadog-instrumentations/src/helpers/instrument.js +5 -34
  11. package/packages/datadog-instrumentations/src/helpers/instrumentations.js +7 -0
  12. package/packages/datadog-instrumentations/src/helpers/register.js +59 -0
  13. package/packages/datadog-instrumentations/src/jest.js +33 -11
  14. package/packages/datadog-instrumentations/src/koa.js +1 -1
  15. package/packages/datadog-instrumentations/src/mocha.js +4 -1
  16. package/packages/datadog-instrumentations/src/restify.js +27 -5
  17. package/packages/datadog-instrumentations/src/router.js +1 -1
  18. package/packages/datadog-plugin-aws-sdk/src/base.js +1 -2
  19. package/packages/datadog-plugin-aws-sdk/src/services/sqs.js +1 -2
  20. package/packages/datadog-plugin-cucumber/src/index.js +4 -0
  21. package/packages/datadog-plugin-fs/src/index.js +72 -38
  22. package/packages/datadog-plugin-jest/src/index.js +25 -4
  23. package/packages/datadog-plugin-mocha/src/index.js +2 -2
  24. package/packages/datadog-plugin-mongodb-core/src/index.js +21 -6
  25. package/packages/datadog-plugin-oracledb/src/index.js +12 -4
  26. package/packages/datadog-plugin-restify/src/index.js +7 -0
  27. package/packages/dd-trace/index.js +1 -1
  28. package/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js +50 -0
  29. package/packages/dd-trace/src/ci-visibility/exporters/agentless/index.js +53 -8
  30. package/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js +23 -24
  31. package/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +220 -0
  32. package/packages/dd-trace/src/config.js +13 -0
  33. package/packages/dd-trace/src/encode/0.4.js +55 -58
  34. package/packages/dd-trace/src/encode/agentless-ci-visibility.js +13 -34
  35. package/packages/dd-trace/src/encode/coverage-ci-visibility.js +84 -0
  36. package/packages/dd-trace/src/exporters/agent/index.js +13 -7
  37. package/packages/dd-trace/src/exporters/agent/writer.js +1 -1
  38. package/packages/dd-trace/src/{profiling/exporters → exporters/common}/form-data.js +0 -0
  39. package/packages/dd-trace/src/exporters/common/request.js +65 -39
  40. package/packages/dd-trace/src/exporters/common/writer.js +16 -6
  41. package/packages/dd-trace/src/id.js +16 -13
  42. package/packages/dd-trace/src/index.js +10 -0
  43. package/packages/dd-trace/src/noop/proxy.js +87 -0
  44. package/packages/dd-trace/src/noop/span.js +12 -12
  45. package/packages/dd-trace/src/noop/tracer.js +8 -5
  46. package/packages/dd-trace/src/opentracing/propagation/text_map.js +6 -6
  47. package/packages/dd-trace/src/opentracing/span.js +63 -49
  48. package/packages/dd-trace/src/opentracing/span_context.js +1 -5
  49. package/packages/dd-trace/src/opentracing/tracer.js +31 -36
  50. package/packages/dd-trace/src/plugin_manager.js +97 -65
  51. package/packages/dd-trace/src/plugins/index.js +58 -45
  52. package/packages/dd-trace/src/plugins/util/ci.js +34 -9
  53. package/packages/dd-trace/src/plugins/util/git.js +52 -2
  54. package/packages/dd-trace/src/plugins/util/tags.js +4 -1
  55. package/packages/dd-trace/src/plugins/util/web.js +1 -1
  56. package/packages/dd-trace/src/profiling/exporters/agent.js +1 -1
  57. package/packages/dd-trace/src/profiling/profilers/cpu.js +3 -3
  58. package/packages/dd-trace/src/proxy.js +18 -90
  59. package/packages/dd-trace/src/startup-log.js +8 -19
  60. package/packages/dd-trace/src/telemetry.js +2 -15
  61. package/scripts/install_plugin_modules.js +17 -26
@@ -0,0 +1,220 @@
1
+
2
+ const fs = require('fs')
3
+ const https = require('https')
4
+ const path = require('path')
5
+
6
+ const FormData = require('../../../exporters/common/form-data')
7
+
8
+ const log = require('../../../log')
9
+ const {
10
+ getLatestCommits,
11
+ getRepositoryUrl,
12
+ generatePackFilesForCommits,
13
+ getCommitsToUpload
14
+ } = require('../../../plugins/util/git')
15
+
16
// A valid git commit SHA is exactly 40 lowercase hex characters.
// Anchored so that strings merely *containing* a 40-hex-char run (e.g. an
// 80-character concatenation of two SHAs) are rejected instead of accepted.
const isValidSha = (sha) => /^[0-9a-f]{40}$/.test(sha)

/**
 * Validates the commit list returned by the backend.
 * Each entry must have `type === 'commit'` and an `id` that is a well-formed
 * 40-char hex SHA containing no extraneous characters.
 *
 * @param {Array<{id: string, type: string}>} commits - backend response data
 * @returns {string[]} the validated commit SHAs
 * @throws {Error} if any entry has the wrong type or a malformed SHA
 */
function sanitizeCommits (commits) {
  return commits.map(({ id: commitSha, type }) => {
    if (type !== 'commit') {
      throw new Error('Invalid commit response')
    }
    // Stripping non-hex characters must be a no-op for a legitimate SHA;
    // any difference means the backend sent something unexpected.
    const sanitizedCommit = commitSha.replace(/[^0-9a-f]+/g, '')
    if (sanitizedCommit !== commitSha || !isValidSha(sanitizedCommit)) {
      throw new Error('Invalid commit format')
    }
    return sanitizedCommit
  })
}
30
+
31
/**
 * Builds the request options shared by every git-metadata request:
 * a POST carrying the Datadog API key header, a 15s timeout, and the
 * connection details extracted from `url`.
 *
 * @param {URL} url - target endpoint (only protocol/hostname/port are used)
 * @returns {object} options suitable for `https.request`
 */
function getCommonRequestOptions (url) {
  const apiKey = process.env.DATADOG_API_KEY || process.env.DD_API_KEY
  const { protocol, hostname, port } = url

  return {
    method: 'POST',
    headers: { 'dd-api-key': apiKey },
    timeout: 15000,
    protocol,
    hostname,
    port
  }
}
43
+
44
+ /**
45
+ * This function posts the SHAs of the commits of the last month
46
+ * The response are the commits for which the backend already has information
47
+ * This response is used to know which commits can be ignored from there on
48
+ */
49
/**
 * This function posts the SHAs of the commits of the last month
 * The response are the commits for which the backend already has information
 * This response is used to know which commits can be ignored from there on
 *
 * @param {{url: URL, repositoryUrl: string}} params
 * @param {Function} callback - called with (err) on failure, or
 *   (null, commitsToExclude, headCommit) on success
 * @returns {import('http').ClientRequest} the in-flight request
 */
function getCommitsToExclude ({ url, repositoryUrl }, callback) {
  const latestCommits = getLatestCommits()
  // presumably the first entry returned by getLatestCommits is HEAD —
  // verify against that helper's ordering.
  const [headCommit] = latestCommits

  const commonOptions = getCommonRequestOptions(url)

  const options = {
    ...commonOptions,
    headers: {
      ...commonOptions.headers,
      'Content-Type': 'application/json'
    },
    path: '/api/v2/git/repository/search_commits'
  }

  // Request body: every local recent commit, tagged with the repository URL
  // so the backend can scope its lookup.
  const localCommitData = JSON.stringify({
    meta: {
      repository_url: repositoryUrl
    },
    data: latestCommits.map(commit => ({
      id: commit,
      type: 'commit'
    }))
  })

  const request = https.request(options, (res) => {
    let responseData = ''

    // Accumulate the body; 'end' only fires once data is consumed.
    res.on('data', chunk => { responseData += chunk })
    res.on('end', () => {
      if (res.statusCode === 200) {
        let commitsToExclude
        try {
          // sanitizeCommits throws on malformed entries; both JSON and
          // validation failures are reported as a single parse error.
          commitsToExclude = sanitizeCommits(JSON.parse(responseData).data)
        } catch (e) {
          callback(new Error(`Can't parse response: ${e.message}`))
          return
        }
        callback(null, commitsToExclude, headCommit)
      } else {
        const error = new Error(`Error getting commits: ${res.statusCode} ${res.statusMessage}`)
        callback(error)
      }
    })
  })

  // NOTE(review): the `timeout` option in commonOptions only arms the socket
  // timeout; no 'timeout' listener destroys the request here, so a stalled
  // connection may hang — confirm this is intended.
  request.write(localCommitData)
  request.on('error', callback)

  request.end()

  return request
}
102
+
103
+ /**
104
+ * This function uploads a git packfile
105
+ */
106
/**
 * This function uploads a git packfile
 *
 * @param {{url: URL, packFileToUpload: string, repositoryUrl: string, headCommit: string}} params
 * @param {Function} callback - called with (err) on failure or (null) on a 204 response
 */
function uploadPackFile ({ url, packFileToUpload, repositoryUrl, headCommit }, callback) {
  const form = new FormData()

  // The head commit is sent alongside the packfile so the backend can
  // associate the uploaded objects with the repository state.
  const pushedSha = JSON.stringify({
    data: {
      id: headCommit,
      type: 'commit'
    },
    meta: {
      repository_url: repositoryUrl
    }
  })

  form.append('pushedSha', pushedSha, { contentType: 'application/json' })

  try {
    const packFileContent = fs.readFileSync(packFileToUpload)
    // The original filename includes a random prefix, so we remove it here
    const [, filename] = path.basename(packFileToUpload).split('-')
    form.append('packfile', packFileContent, {
      filename,
      contentType: 'application/octet-stream'
    })
  } catch (e) {
    // NOTE(review): the underlying error `e` is discarded; only the path is
    // reported back to the caller — consider attaching it as a cause.
    callback(new Error(`Error reading packfile: ${packFileToUpload}`))
    return
  }

  const commonOptions = getCommonRequestOptions(url)

  const options = {
    ...commonOptions,
    path: '/api/v2/git/repository/packfile',
    headers: {
      ...commonOptions.headers,
      // form.getHeaders() supplies the multipart Content-Type with boundary.
      ...form.getHeaders()
    }
  }

  const req = https.request(options, res => {
    // The body is unused, but data must be consumed for 'end' to fire.
    res.on('data', () => {})
    res.on('end', () => {
      if (res.statusCode === 204) {
        callback(null)
      } else {
        const error = new Error(`Error uploading packfiles: ${res.statusCode} ${res.statusMessage}`)
        error.status = res.statusCode

        callback(error)
      }
    })
  })

  req.on('error', err => {
    callback(err)
  })
  // Stream the multipart body into the request; piping also ends the request.
  form.pipe(req)
}
164
+
165
+ /**
166
+ * This function uploads git metadata to CI Visibility's backend.
167
+ */
168
/**
 * This function uploads git metadata to CI Visibility's backend.
 *
 * Flow: ask the backend which recent commits it already knows about, compute
 * the missing ones, generate packfiles for them, and upload the packfiles
 * one at a time.
 *
 * @param {string} site - Datadog site, e.g. 'datadoghq.com'
 * @param {Function} callback - called with (err) on failure or (null) when done
 */
function sendGitMetadata (site, callback) {
  const url = new URL(`https://api.${site}`)

  const repositoryUrl = getRepositoryUrl()

  getCommitsToExclude({ url, repositoryUrl }, (err, commitsToExclude, headCommit) => {
    if (err) {
      callback(err)
      return
    }
    const commitsToUpload = getCommitsToUpload(commitsToExclude)

    if (!commitsToUpload.length) {
      log.debug('No commits to upload')
      callback(null)
      return
    }

    const packFilesToUpload = generatePackFilesForCommits(commitsToUpload)

    let packFileIndex = 0
    // This uploads packfiles sequentially: each completed upload triggers
    // the next until the list is exhausted or an upload fails.
    const uploadPackFileCallback = (err) => {
      if (err || packFileIndex === packFilesToUpload.length) {
        callback(err)
        return
      }
      uploadPackFile(
        {
          packFileToUpload: packFilesToUpload[packFileIndex++],
          url,
          repositoryUrl,
          headCommit
        },
        uploadPackFileCallback
      )
    }

    // Kick off the chain. Reusing the callback (instead of duplicating the
    // initial uploadPackFile call) also handles an empty packfile list
    // gracefully: the length check above fires immediately and reports
    // success rather than attempting to read an undefined path.
    uploadPackFileCallback(null)
  })
}
217
+
218
+ module.exports = {
219
+ sendGitMetadata
220
+ }
@@ -67,6 +67,12 @@ class Config {
67
67
  null
68
68
  )
69
69
  const DD_CIVISIBILITY_AGENTLESS_URL = process.env.DD_CIVISIBILITY_AGENTLESS_URL
70
+
71
+ const DD_CIVISIBILITY_ITR_ENABLED = coalesce(
72
+ process.env.DD_CIVISIBILITY_ITR_ENABLED,
73
+ false
74
+ )
75
+
70
76
  const DD_SERVICE = options.service ||
71
77
  process.env.DD_SERVICE ||
72
78
  process.env.DD_SERVICE_NAME ||
@@ -174,6 +180,11 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)
174
180
  |[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}`
175
181
  )
176
182
 
183
+ const DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = coalesce(
184
+ process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED,
185
+ false
186
+ )
187
+
177
188
  const sampler = (options.experimental && options.experimental.sampler) || {}
178
189
  const ingestion = options.ingestion || {}
179
190
  const dogstatsd = coalesce(options.dogstatsd, {})
@@ -248,6 +259,8 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)
248
259
  obfuscatorKeyRegex: DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
249
260
  obfuscatorValueRegex: DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP
250
261
  }
262
+ this.isGitUploadEnabled = isTrue(DD_CIVISIBILITY_GIT_UPLOAD_ENABLED)
263
+ this.isIntelligentTestRunnerEnabled = isTrue(DD_CIVISIBILITY_ITR_ENABLED)
251
264
 
252
265
  tagger.add(this.tags, {
253
266
  service: this.service,
@@ -13,7 +13,8 @@ float64Array[0] = -1
13
13
  const bigEndian = uInt8Float64Array[7] === 0
14
14
 
15
15
  class AgentEncoder {
16
- constructor (writer) {
16
+ constructor (writer, limit = SOFT_LIMIT) {
17
+ this._limit = limit
17
18
  this._traceBytes = new Chunk()
18
19
  this._stringBytes = new Chunk()
19
20
  this._writer = writer
@@ -41,7 +42,8 @@ class AgentEncoder {
41
42
  })
42
43
 
43
44
  // we can go over the soft limit since the agent has a 50MB hard limit
44
- if (this._traceBytes.length > SOFT_LIMIT || this._stringBytes.length > SOFT_LIMIT) {
45
+ if (this._traceBytes.length > this._limit || this._stringBytes.length > this._limit) {
46
+ log.debug('Buffer went over soft limit, flushing')
45
47
  this._writer.flush()
46
48
  }
47
49
  }
@@ -57,6 +59,10 @@ class AgentEncoder {
57
59
  return buffer
58
60
  }
59
61
 
62
+ reset () {
63
+ this._reset()
64
+ }
65
+
60
66
  _encode (bytes, trace) {
61
67
  this._encodeArrayPrefix(bytes, trace)
62
68
 
@@ -109,29 +115,37 @@ class AgentEncoder {
109
115
 
110
116
  _encodeArrayPrefix (bytes, value) {
111
117
  const length = value.length
112
- const buffer = bytes.buffer
113
118
  const offset = bytes.length
114
119
 
115
120
  bytes.reserve(5)
116
121
  bytes.length += 5
117
122
 
118
- buffer[offset] = 0xdd
119
- buffer[offset + 1] = length >> 24
120
- buffer[offset + 2] = length >> 16
121
- buffer[offset + 3] = length >> 8
122
- buffer[offset + 4] = length
123
+ bytes.buffer[offset] = 0xdd
124
+ bytes.buffer[offset + 1] = length >> 24
125
+ bytes.buffer[offset + 2] = length >> 16
126
+ bytes.buffer[offset + 3] = length >> 8
127
+ bytes.buffer[offset + 4] = length
123
128
  }
124
129
 
125
- _encodeByte (bytes, value) {
126
- const buffer = bytes.buffer
130
+ _encodeMapPrefix (bytes, keysLength) {
131
+ const offset = bytes.length
132
+
133
+ bytes.reserve(5)
134
+ bytes.length += 5
135
+ bytes.buffer[offset] = 0xdf
136
+ bytes.buffer[offset + 1] = keysLength >> 24
137
+ bytes.buffer[offset + 2] = keysLength >> 16
138
+ bytes.buffer[offset + 3] = keysLength >> 8
139
+ bytes.buffer[offset + 4] = keysLength
140
+ }
127
141
 
142
+ _encodeByte (bytes, value) {
128
143
  bytes.reserve(1)
129
144
 
130
- buffer[bytes.length++] = value
145
+ bytes.buffer[bytes.length++] = value
131
146
  }
132
147
 
133
148
  _encodeId (bytes, id) {
134
- const buffer = bytes.buffer
135
149
  const offset = bytes.length
136
150
 
137
151
  bytes.reserve(9)
@@ -139,33 +153,31 @@ class AgentEncoder {
139
153
 
140
154
  id = id.toArray()
141
155
 
142
- buffer[offset] = 0xcf
143
- buffer[offset + 1] = id[0]
144
- buffer[offset + 2] = id[1]
145
- buffer[offset + 3] = id[2]
146
- buffer[offset + 4] = id[3]
147
- buffer[offset + 5] = id[4]
148
- buffer[offset + 6] = id[5]
149
- buffer[offset + 7] = id[6]
150
- buffer[offset + 8] = id[7]
156
+ bytes.buffer[offset] = 0xcf
157
+ bytes.buffer[offset + 1] = id[0]
158
+ bytes.buffer[offset + 2] = id[1]
159
+ bytes.buffer[offset + 3] = id[2]
160
+ bytes.buffer[offset + 4] = id[3]
161
+ bytes.buffer[offset + 5] = id[4]
162
+ bytes.buffer[offset + 6] = id[5]
163
+ bytes.buffer[offset + 7] = id[6]
164
+ bytes.buffer[offset + 8] = id[7]
151
165
  }
152
166
 
153
167
  _encodeInteger (bytes, value) {
154
- const buffer = bytes.buffer
155
168
  const offset = bytes.length
156
169
 
157
170
  bytes.reserve(5)
158
171
  bytes.length += 5
159
172
 
160
- buffer[offset] = 0xce
161
- buffer[offset + 1] = value >> 24
162
- buffer[offset + 2] = value >> 16
163
- buffer[offset + 3] = value >> 8
164
- buffer[offset + 4] = value
173
+ bytes.buffer[offset] = 0xce
174
+ bytes.buffer[offset + 1] = value >> 24
175
+ bytes.buffer[offset + 2] = value >> 16
176
+ bytes.buffer[offset + 3] = value >> 8
177
+ bytes.buffer[offset + 4] = value
165
178
  }
166
179
 
167
180
  _encodeLong (bytes, value) {
168
- const buffer = bytes.buffer
169
181
  const offset = bytes.length
170
182
  const hi = (value / Math.pow(2, 32)) >> 0
171
183
  const lo = value >>> 0
@@ -173,40 +185,27 @@ class AgentEncoder {
173
185
  bytes.reserve(9)
174
186
  bytes.length += 9
175
187
 
176
- buffer[offset] = 0xcf
177
- buffer[offset + 1] = hi >> 24
178
- buffer[offset + 2] = hi >> 16
179
- buffer[offset + 3] = hi >> 8
180
- buffer[offset + 4] = hi
181
- buffer[offset + 5] = lo >> 24
182
- buffer[offset + 6] = lo >> 16
183
- buffer[offset + 7] = lo >> 8
184
- buffer[offset + 8] = lo
188
+ bytes.buffer[offset] = 0xcf
189
+ bytes.buffer[offset + 1] = hi >> 24
190
+ bytes.buffer[offset + 2] = hi >> 16
191
+ bytes.buffer[offset + 3] = hi >> 8
192
+ bytes.buffer[offset + 4] = hi
193
+ bytes.buffer[offset + 5] = lo >> 24
194
+ bytes.buffer[offset + 6] = lo >> 16
195
+ bytes.buffer[offset + 7] = lo >> 8
196
+ bytes.buffer[offset + 8] = lo
185
197
  }
186
198
 
187
199
  _encodeMap (bytes, value) {
188
200
  const keys = Object.keys(value)
189
- const buffer = bytes.buffer
190
- const offset = bytes.length
191
-
192
- bytes.reserve(5)
193
- bytes.length += 5
194
-
195
- let length = 0
201
+ const validKeys = keys.filter(key => typeof value[key] === 'string' || typeof value[key] === 'number')
196
202
 
197
- for (const key of keys) {
198
- if (typeof value[key] !== 'string' && typeof value[key] !== 'number') return
199
- length++
203
+ this._encodeMapPrefix(bytes, validKeys.length)
200
204
 
205
+ for (const key of validKeys) {
201
206
  this._encodeString(bytes, key)
202
207
  this._encodeValue(bytes, value[key])
203
208
  }
204
-
205
- buffer[offset] = 0xdf
206
- buffer[offset + 1] = length >> 24
207
- buffer[offset + 2] = length >> 16
208
- buffer[offset + 3] = length >> 8
209
- buffer[offset + 4] = length
210
209
  }
211
210
 
212
211
  _encodeValue (bytes, value) {
@@ -233,21 +232,19 @@ class AgentEncoder {
233
232
  _encodeFloat (bytes, value) {
234
233
  float64Array[0] = value
235
234
 
236
- const buffer = bytes.buffer
237
235
  const offset = bytes.length
238
-
239
236
  bytes.reserve(9)
240
237
  bytes.length += 9
241
238
 
242
- buffer[offset] = 0xcb
239
+ bytes.buffer[offset] = 0xcb
243
240
 
244
241
  if (bigEndian) {
245
242
  for (let i = 0; i <= 7; i++) {
246
- buffer[offset + i + 1] = uInt8Float64Array[i]
243
+ bytes.buffer[offset + i + 1] = uInt8Float64Array[i]
247
244
  }
248
245
  } else {
249
246
  for (let i = 7; i >= 0; i--) {
250
- buffer[bytes.length - i - 1] = uInt8Float64Array[i]
247
+ bytes.buffer[bytes.length - i - 1] = uInt8Float64Array[i]
251
248
  }
252
249
  }
253
250
  }
@@ -1,10 +1,8 @@
1
1
  'use strict'
2
2
  const { truncateSpan, normalizeSpan } = require('./tags-processors')
3
- const Chunk = require('./chunk')
4
3
  const { AgentEncoder } = require('./0.4')
5
4
  const { version: ddTraceVersion } = require('../../../../package.json')
6
5
  const id = require('../../../dd-trace/src/id')
7
-
8
6
  const ENCODING_VERSION = 1
9
7
 
10
8
  const ALLOWED_CONTENT_TYPES = ['test_session_end', 'test_suite_end', 'test']
@@ -12,7 +10,7 @@ const ALLOWED_CONTENT_TYPES = ['test_session_end', 'test_suite_end', 'test']
12
10
  const TEST_SUITE_KEYS_LENGTH = 11
13
11
  const TEST_SESSION_KEYS_LENGTH = 10
14
12
 
15
- const CHUNK_SIZE = 4 * 1024 * 1024 // 4MB
13
+ const INTAKE_SOFT_LIMIT = 2 * 1024 * 1024 // 2MB
16
14
 
17
15
  function formatSpan (span) {
18
16
  let encodingVersion = ENCODING_VERSION
@@ -27,16 +25,11 @@ function formatSpan (span) {
27
25
  }
28
26
 
29
27
  class AgentlessCiVisibilityEncoder extends AgentEncoder {
30
- constructor ({ runtimeId, service, env }) {
31
- super(...arguments)
32
- this._events = []
28
+ constructor (writer, { runtimeId, service, env }) {
29
+ super(writer, INTAKE_SOFT_LIMIT)
33
30
  this.runtimeId = runtimeId
34
31
  this.service = service
35
32
  this.env = env
36
- this._traceBytes = new Chunk(CHUNK_SIZE)
37
- this._stringBytes = new Chunk(CHUNK_SIZE)
38
- this._stringCount = 0
39
- this._stringMap = {}
40
33
 
41
34
  // Used to keep track of the number of encoded events to update the
42
35
  // length of `payload.events` when calling `makePayload`
@@ -186,35 +179,21 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder {
186
179
  const lo = value >>> 0
187
180
  const flag = isPositive ? 0xcf : 0xd3
188
181
 
189
- const buffer = bytes.buffer
190
182
  const offset = bytes.length
191
183
 
192
184
  // int 64
193
185
  bytes.reserve(9)
194
186
  bytes.length += 9
195
187
 
196
- buffer[offset] = flag
197
- buffer[offset + 1] = hi >> 24
198
- buffer[offset + 2] = hi >> 16
199
- buffer[offset + 3] = hi >> 8
200
- buffer[offset + 4] = hi
201
- buffer[offset + 5] = lo >> 24
202
- buffer[offset + 6] = lo >> 16
203
- buffer[offset + 7] = lo >> 8
204
- buffer[offset + 8] = lo
205
- }
206
-
207
- _encodeMapPrefix (bytes, keysLength) {
208
- const buffer = bytes.buffer
209
- const offset = bytes.length
210
-
211
- bytes.reserve(5)
212
- bytes.length += 5
213
- buffer[offset] = 0xdf
214
- buffer[offset + 1] = keysLength >> 24
215
- buffer[offset + 2] = keysLength >> 16
216
- buffer[offset + 3] = keysLength >> 8
217
- buffer[offset + 4] = keysLength
188
+ bytes.buffer[offset] = flag
189
+ bytes.buffer[offset + 1] = hi >> 24
190
+ bytes.buffer[offset + 2] = hi >> 16
191
+ bytes.buffer[offset + 3] = hi >> 8
192
+ bytes.buffer[offset + 4] = hi
193
+ bytes.buffer[offset + 5] = lo >> 24
194
+ bytes.buffer[offset + 6] = lo >> 16
195
+ bytes.buffer[offset + 7] = lo >> 8
196
+ bytes.buffer[offset + 8] = lo
218
197
  }
219
198
 
220
199
  _encode (bytes, trace) {
@@ -240,7 +219,7 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder {
240
219
  const traceSize = bytes.length
241
220
  const buffer = Buffer.allocUnsafe(traceSize)
242
221
 
243
- bytes.buffer.copy(buffer, 0, 0, bytes.length)
222
+ bytes.buffer.copy(buffer, 0, 0, traceSize)
244
223
 
245
224
  this.reset()
246
225
 
@@ -0,0 +1,84 @@
1
+ 'use strict'
2
+ const { AgentEncoder } = require('./0.4')
3
+ const Chunk = require('./chunk')
4
+
5
+ const FormData = require('../exporters/common/form-data')
6
+
7
+ const COVERAGE_PAYLOAD_VERSION = 1
8
+ const COVERAGE_KEYS_LENGTH = 4
9
+ const MAXIMUM_NUM_COVERAGE_FILES = 100
10
+
11
// Encodes per-test code coverage events as individual msgpack buffers and
// packages them into a multipart form for CI Visibility's coverage intake.
// Relies on the parent AgentEncoder's low-level msgpack helpers
// (_encodeMapPrefix, _encodeString, _encodeInteger, _encodeId,
// _encodeArrayPrefix, _reset).
class CoverageCIVisibilityEncoder extends AgentEncoder {
  constructor () {
    super(...arguments)
    // Finished msgpack buffers, one per encoded coverage event, waiting to
    // be flushed by makePayload().
    this.codeCoverageBuffers = []
    // Scratch chunk reused across encode() calls; rewound by reset().
    this._coverageBytes = new Chunk()
    this.reset()
  }

  // Number of coverage events currently queued for the next payload.
  count () {
    return this.codeCoverageBuffers.length
  }

  // Encodes one coverage event and stores the resulting buffer.
  // `coverage` is expected to carry { traceId, spanId, files } — TODO
  // confirm against callers.
  encode (coverage) {
    const bytes = this._coverageBytes
    const coverageBuffer = this.encodeCodeCoverage(bytes, coverage)
    this.codeCoverageBuffers.push(coverageBuffer)
    this.reset()
  }

  // Serializes one coverage event as a msgpack map with
  // COVERAGE_KEYS_LENGTH keys: version, trace_id, span_id and
  // files (an array of { filename } maps). Returns a right-sized copy of
  // the encoded bytes, since the scratch chunk is reused.
  encodeCodeCoverage (bytes, coverage) {
    this._encodeMapPrefix(bytes, COVERAGE_KEYS_LENGTH)
    this._encodeString(bytes, 'version')
    this._encodeInteger(bytes, COVERAGE_PAYLOAD_VERSION)
    this._encodeString(bytes, 'trace_id')
    this._encodeId(bytes, coverage.traceId)
    this._encodeString(bytes, 'span_id')
    this._encodeId(bytes, coverage.spanId)
    this._encodeString(bytes, 'files')
    this._encodeArrayPrefix(bytes, coverage.files)
    for (const filename of coverage.files) {
      this._encodeMapPrefix(bytes, 1)
      this._encodeString(bytes, 'filename')
      this._encodeString(bytes, filename)
    }
    const traceSize = bytes.length
    const buffer = Buffer.allocUnsafe(traceSize)

    bytes.buffer.copy(buffer, 0, 0, bytes.length)

    return buffer
  }

  // Clears the parent encoder's state and rewinds the coverage scratch
  // chunk (guarded because the parent constructor may call reset() before
  // _coverageBytes is assigned).
  reset () {
    this._reset()
    if (this._coverageBytes) {
      this._coverageBytes.length = 0
    }
  }

  // Builds the multipart payload with up to MAXIMUM_NUM_COVERAGE_FILES
  // coverage buffers; any excess stays queued for the next makePayload call.
  makePayload () {
    const form = new FormData()

    let coverageFileIndex = 1

    for (const coverageBuffer of this.codeCoverageBuffers.slice(0, MAXIMUM_NUM_COVERAGE_FILES)) {
      const coverageFilename = `coverage${coverageFileIndex++}`
      form.append(
        coverageFilename,
        coverageBuffer,
        {
          filename: `${coverageFilename}.msgpack`,
          contentType: 'application/msgpack'
        }
      )
    }
    // 'event' is a backend requirement
    form.append('event', JSON.stringify({}), { filename: 'event.json', contentType: 'application/json' })
    this.codeCoverageBuffers = this.codeCoverageBuffers.slice(MAXIMUM_NUM_COVERAGE_FILES)

    return form
  }
}
83
+
84
+ module.exports = { CoverageCIVisibilityEncoder }
@@ -3,17 +3,16 @@
3
3
  const URL = require('url').URL
4
4
  const log = require('../../log')
5
5
  const Writer = require('./writer')
6
- const Scheduler = require('../scheduler')
7
6
 
8
7
  class AgentExporter {
9
- constructor ({ url, hostname, port, flushInterval, lookup, protocolVersion }, prioritySampler) {
8
+ constructor (config, prioritySampler) {
9
+ this._config = config
10
+ const { url, hostname, port, lookup, protocolVersion } = config
10
11
  this._url = url || new URL(`http://${hostname || 'localhost'}:${port}`)
11
12
  this._writer = new Writer({ url: this._url, prioritySampler, lookup, protocolVersion })
12
13
 
13
- if (flushInterval > 0) {
14
- this._scheduler = new Scheduler(() => this._writer.flush(), flushInterval)
15
- }
16
- this._scheduler && this._scheduler.start()
14
+ this._timer = undefined
15
+ process.once('beforeExit', () => this._writer.flush())
17
16
  }
18
17
 
19
18
  setUrl (url) {
@@ -29,8 +28,15 @@ class AgentExporter {
29
28
  export (spans) {
30
29
  this._writer.append(spans)
31
30
 
32
- if (!this._scheduler) {
31
+ const { flushInterval } = this._config
32
+
33
+ if (flushInterval === 0) {
33
34
  this._writer.flush()
35
+ } else if (flushInterval > 0 && !this._timer) {
36
+ this._timer = setTimeout(() => {
37
+ this._writer.flush()
38
+ this._timer = clearTimeout(this._timer)
39
+ }, flushInterval).unref()
34
40
  }
35
41
  }
36
42
  }
@@ -99,7 +99,7 @@ function makeRequest (version, data, count, url, lookup, needsStartupLog, cb) {
99
99
 
100
100
  log.debug(() => `Request to the agent: ${JSON.stringify(options)}`)
101
101
 
102
- request(data, options, true, (err, res, status) => {
102
+ request(data, options, (err, res, status) => {
103
103
  if (needsStartupLog) {
104
104
  // Note that logging will only happen once, regardless of how many times this is called.
105
105
  startupLog({