azify-logger 1.0.26 → 1.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -5
- package/index.js +40 -17
- package/middleware-express.js +267 -366
- package/middleware-fastify.js +348 -0
- package/middleware-restify.js +147 -303
- package/package.json +31 -30
- package/queue/fileQueue.js +100 -0
- package/queue/redisQueue.js +179 -0
- package/queue/workerManager.js +111 -0
- package/register-otel.js +63 -13
- package/register.js +364 -99
- package/sampling.js +79 -0
- package/scripts/redis-worker.js +439 -0
- package/server.js +169 -74
- package/store.js +10 -4
- package/streams/bunyan.d.ts +26 -0
- package/streams/bunyan.js +39 -8
- package/streams/httpQueue.js +342 -0
- package/streams/pino.d.ts +38 -0
- package/streams/pino.js +44 -7
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
const fs = require('fs')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
|
|
4
|
+
/**
 * Creates an on-disk spool that buffers log entries as NDJSON files and
 * periodically drains them through `options.pushFn(entries)`.
 *
 * Each process appends to its own `spool-<pid>.ndjson` file; `flush()`
 * claims a file by atomically renaming it to `<file>.draining` (so two
 * concurrent flushers cannot drain the same file) and pushes its
 * entries in batches. Lines that fail to parse as JSON are dropped.
 *
 * @param {object} [options]
 * @param {string}   [options.directory] spool directory, created if missing
 * @param {number}   [options.flushInterval=5000] ms between scheduled flushes
 * @param {number}   [options.batchSize=100] max entries per pushFn call
 * @param {Function} options.pushFn async (entries) => delivers one batch
 * @returns {{append: Function, flush: Function}|null} null when pushFn is missing
 */
function createFileSpool(options = {}) {
  const directory = options.directory || path.join(process.cwd(), '.azify-logger-spool')
  const flushInterval = options.flushInterval || 5000
  const batchSize = options.batchSize || 100
  const pushFn = typeof options.pushFn === 'function' ? options.pushFn : null

  if (!pushFn) {
    return null
  }

  fs.mkdirSync(directory, { recursive: true })

  let scheduled = false
  let flushing = false

  // Arms a single delayed flush; the timer is unref'd so the spool never
  // keeps the host process alive.
  const scheduleFlush = () => {
    if (scheduled) {
      return
    }
    scheduled = true
    const timer = setTimeout(async () => {
      scheduled = false
      await flush().catch(() => {})
    }, flushInterval)
    if (typeof timer.unref === 'function') {
      timer.unref()
    }
  }

  // Appends one entry to this process's spool file and arms a flush.
  async function append(entry) {
    const filePath = path.join(directory, `spool-${process.pid}.ndjson`)
    await fs.promises.appendFile(filePath, JSON.stringify(entry) + '\n')
    scheduleFlush()
  }

  // Drains every spool file in the directory through pushFn.
  // On a push failure, every not-yet-delivered line (the failed batch plus
  // anything still queued from the same file) is returned to the live spool
  // file and the draining copy is removed, so no entry is stranded.
  // (A hard crash mid-drain can still leave a `.draining` file behind.)
  async function flush() {
    if (flushing) {
      return
    }
    flushing = true
    try {
      const files = (await fs.promises.readdir(directory)).filter((file) => file.endsWith('.ndjson'))
      for (const file of files) {
        const fullPath = path.join(directory, file)
        const drainingPath = `${fullPath}.draining`
        try {
          // Atomic rename claims the file; a concurrent flusher loses the
          // race and sees ENOENT.
          await fs.promises.rename(fullPath, drainingPath)
        } catch (err) {
          if (err.code === 'ENOENT') {
            continue
          }
          throw err
        }

        const content = await fs.promises.readFile(drainingPath, 'utf8').catch(() => '')
        if (!content) {
          await fs.promises.unlink(drainingPath).catch(() => {})
          continue
        }

        const lines = content.split('\n').filter(Boolean)
        while (lines.length) {
          const slice = lines.splice(0, batchSize)
          const entries = slice.map((line) => {
            try {
              return JSON.parse(line)
            } catch (_) {
              return null
            }
          }).filter(Boolean)
          if (entries.length > 0) {
            try {
              await pushFn(entries)
            } catch (err) {
              // BUG FIX: previously only the failed slice was re-appended
              // and the rest of the file stayed in the orphaned .draining
              // file (never matched by the `.ndjson` readdir filter above).
              // Push back the raw undelivered lines and drop the copy.
              const unsent = slice.concat(lines)
              await fs.promises.appendFile(fullPath, unsent.join('\n') + '\n')
              await fs.promises.unlink(drainingPath).catch(() => {})
              throw new Error('Failed to flush file spool entries back to Redis', { cause: err })
            }
          }
        }

        await fs.promises.unlink(drainingPath).catch(() => {})
      }
    } finally {
      flushing = false
    }
  }

  return {
    append,
    flush
  }
}
|
|
95
|
+
|
|
96
|
+
// Public API of the file spool module.
module.exports = {
  createFileSpool
}
|
|
99
|
+
|
|
100
|
+
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
const Redis = require('ioredis')
|
|
2
|
+
|
|
3
|
+
// Connection defaults, all overridable via environment variables.
// NOTE(review): default port 6381 is non-standard — presumably a dedicated
// logging Redis instance; confirm against the deployment.
const DEFAULT_REDIS_URL = process.env.AZIFY_LOGGER_REDIS_URL || 'redis://localhost:6381'
// Stream key; AZIFY_LOGGER_REDIS_QUEUE_KEY is kept as a legacy alias.
const DEFAULT_STREAM_KEY = process.env.AZIFY_LOGGER_REDIS_STREAM || process.env.AZIFY_LOGGER_REDIS_QUEUE_KEY || 'azify-logger:stream'
// Approximate stream length cap, applied via XADD MAXLEN ~ below.
const DEFAULT_MAXLEN = Number(process.env.AZIFY_LOGGER_REDIS_MAX_STREAM_LENGTH) || 100000

// ioredis client options tuned for a fire-and-forget log producer.
const defaultRedisOptions = {
  enableAutoPipelining: true, // coalesce concurrent commands into one round trip
  maxRetriesPerRequest: null, // never fail commands while reconnecting; keep them queued
  // Exponential backoff (factor 1.5) between reconnect attempts, capped at 10s.
  retryStrategy(times) {
    const delay = Math.min(1000 * Math.pow(1.5, times), 10000)
    return delay
  },
  // Force a reconnect on errors that indicate a stale or read-only connection.
  reconnectOnError(err) {
    const msg = err && err.message ? err.message : ''
    if (msg.includes('READONLY') || msg.includes('ECONNRESET') || msg.includes('ECONNREFUSED')) {
      return true
    }
    return false
  }
}
|
|
22
|
+
|
|
23
|
+
/**
 * Creates a Redis Streams producer that ships log entries to the worker.
 * Entries are pushed into an in-memory batch and written to the stream
 * with XADD using an approximate MAXLEN cap ('~'), so Redis trims the
 * stream lazily.
 *
 * All Redis failures are swallowed on purpose: logging must never crash
 * or block the host application. A single "Redis unavailable" warning is
 * emitted per outage (rate-limited across all producer instances in the
 * process via global flags) and the state resets on reconnect.
 *
 * @param {object} [config] { url, streamKey, maxLen, redisOptions }
 * @returns {{enqueue: Function, client: object, streamKey: string, close: Function, flush: Function}}
 * @throws {Error} when no Redis URL can be resolved
 */
function createRedisProducer(config = {}) {
  const url = config.url || process.env.AZIFY_LOGGER_REDIS_URL || DEFAULT_REDIS_URL
  if (!url) {
    throw new Error('Redis URL is required to initialize the azify-logger Redis producer.')
  }

  const streamKey = config.streamKey || DEFAULT_STREAM_KEY
  const maxLen = Number.isFinite(config.maxLen) ? config.maxLen : DEFAULT_MAXLEN
  const redisOptions = { ...defaultRedisOptions, ...(config.redisOptions || {}) }

  const client = new Redis(url, redisOptions)
  let connectionErrorCount = 0
  const ERROR_LOG_INTERVAL = 300000 // re-warn at most every 5 minutes

  // Process-wide guard so multiple producers emit the outage warning once.
  if (typeof global.__azifyLoggerRedisErrorLogged === 'undefined') {
    global.__azifyLoggerRedisErrorLogged = false
    global.__azifyLoggerRedisErrorLastLog = 0
  }

  client.on('error', (err) => {
    const now = Date.now()
    if (!global.__azifyLoggerRedisErrorLogged && now - global.__azifyLoggerRedisErrorLastLog > ERROR_LOG_INTERVAL) {
      if (err && (err.code === 'ECONNREFUSED' || err.message?.includes('ECONNREFUSED') || err.message?.includes('Redis'))) {
        global.__azifyLoggerRedisErrorLogged = true
        global.__azifyLoggerRedisErrorLastLog = now
        connectionErrorCount++
        process.stderr.write('[azify-logger] ⚠️ Redis indisponível. O sistema de logging está desabilitado. A aplicação continua funcionando normalmente.\n')
      }
    }
  })
  client.on('end', () => {
  })
  client.on('connect', () => {
    // Reset the warning state after a successful (re)connect so the next
    // outage is reported again.
    if (connectionErrorCount > 0 || global.__azifyLoggerRedisErrorLogged) {
      connectionErrorCount = 0
      global.__azifyLoggerRedisErrorLogged = false
      global.__azifyLoggerRedisErrorLastLog = 0
    }
  })

  const batch = []
  let batchTimer = null
  // With BATCH_SIZE = 1 and BATCH_TIMEOUT = 0 every entry is flushed on
  // the next tick; the pipeline branch in flushBatch only activates if
  // these constants are raised.
  const BATCH_SIZE = 1
  const BATCH_TIMEOUT = 0
  let flushing = false

  // Drains up to BATCH_SIZE entries and writes them on the next tick
  // (setImmediate keeps XADD off the caller's hot path). Write failures
  // are deliberately ignored.
  function flushBatch() {
    if (flushing || batch.length === 0) return
    flushing = true

    const entriesToFlush = batch.splice(0, BATCH_SIZE)
    if (!entriesToFlush.length) {
      flushing = false
      return
    }

    setImmediate(() => {
      if (entriesToFlush.length === 1) {
        const entry = entriesToFlush[0]
        if (entry) {
          const payload = JSON.stringify(entry)
          client.xadd(streamKey, 'MAXLEN', '~', maxLen, '*', 'entry', payload).catch(() => {})
        }
      } else {
        const pipeline = client.pipeline()
        let validCount = 0

        for (let i = 0; i < entriesToFlush.length; i++) {
          const entry = entriesToFlush[i]
          if (!entry) continue
          const payload = JSON.stringify(entry)
          pipeline.xadd(streamKey, 'MAXLEN', '~', maxLen, '*', 'entry', payload)
          validCount++
        }

        if (validCount > 0) {
          pipeline.exec().catch(() => {})
        }
      }

      flushing = false
      // Keep draining if more entries arrived while we were flushing.
      if (batch.length >= BATCH_SIZE) {
        flushBatch()
      } else if (batch.length > 0) {
        scheduleFlush()
      }
    })
  }

  // Schedules a flush: immediately when a full batch is waiting or the
  // timeout is zero, otherwise after BATCH_TIMEOUT ms on an unref'd timer.
  function scheduleFlush() {
    if (batchTimer || flushing) return

    if (batch.length >= BATCH_SIZE) {
      flushBatch()
    } else if (BATCH_TIMEOUT === 0) {
      setImmediate(() => {
        if (!flushing) {
          flushBatch()
        }
      })
    } else {
      batchTimer = setTimeout(() => {
        batchTimer = null
        flushBatch()
      }, BATCH_TIMEOUT)

      if (typeof batchTimer.unref === 'function') {
        batchTimer.unref()
      }
    }
  }

  // Queues one log entry for delivery; never throws.
  function enqueue(entry) {
    batch.push(entry)

    if (batch.length >= BATCH_SIZE) {
      if (batchTimer) {
        clearTimeout(batchTimer)
        batchTimer = null
      }
      flushBatch()
    } else if (batch.length === 1) {
      scheduleFlush()
    }
  }

  // Drains any buffered entries, then closes the connection gracefully.
  async function close() {
    if (batchTimer) {
      clearTimeout(batchTimer)
      batchTimer = null
    }
    while (batch.length > 0) {
      flushBatch()
      await new Promise(resolve => setTimeout(resolve, 10))
    }
    await client.quit().catch(() => {})
  }

  return {
    enqueue,
    client,
    streamKey,
    close,
    flush: flushBatch
  }
}
|
|
173
|
+
|
|
174
|
+
// Factory plus the resolved defaults, exported so worker/consumer code
// can share the same stream configuration.
module.exports = {
  createRedisProducer,
  DEFAULT_REDIS_URL,
  DEFAULT_STREAM_KEY,
  DEFAULT_MAXLEN
}
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
const { fork } = require('child_process')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
|
|
4
|
+
// Singleton handle to the forked worker process (null when not running).
let workerProcess = null
// True while an automatic respawn is pending, to avoid double restarts.
let restarting = false
|
|
6
|
+
|
|
7
|
+
/**
 * Builds the environment for the forked Redis worker: the parent's
 * environment overlaid with `extraEnv`, plus AZIFY_LOGGER_* variables
 * derived from `redisConfig` (url, streamKey, maxLen). The stream key is
 * mirrored into the legacy AZIFY_LOGGER_REDIS_QUEUE_KEY variable.
 */
function buildEnv(redisConfig = {}, extraEnv = {}) {
  const { url, streamKey, maxLen } = redisConfig
  const env = Object.assign({}, process.env, extraEnv)

  if (url) {
    env.AZIFY_LOGGER_REDIS_URL = String(url)
  }
  if (streamKey) {
    env.AZIFY_LOGGER_REDIS_STREAM = String(streamKey)
    env.AZIFY_LOGGER_REDIS_QUEUE_KEY = String(streamKey)
  }
  if (maxLen != null) {
    env.AZIFY_LOGGER_REDIS_MAX_STREAM_LENGTH = String(maxLen)
  }

  return env
}
|
|
17
|
+
|
|
18
|
+
/**
 * Forks the Redis worker script (`../scripts/redis-worker.js`) and wires
 * its lifecycle: parent-exit cleanup, optional auto-restart, and basic
 * console reporting. Returns the existing worker if one is already
 * alive, the new ChildProcess on success, or null when forking fails.
 *
 * @param {object} redisConfig forwarded to buildEnv (url, streamKey, maxLen)
 * @param {object} [options] { env, stdio = 'inherit', autoRestart = true, restartDelay = 1000 }
 * @returns {object|null}
 */
function spawnWorker(redisConfig, options = {}) {
  if (workerProcess && !workerProcess.killed) {
    return workerProcess
  }

  const scriptPath = path.resolve(__dirname, '..', 'scripts', 'redis-worker.js')
  const env = buildEnv(redisConfig, options.env)
  const stdio = options.stdio || 'inherit'
  const autoRestart = options.autoRestart !== false
  const restartDelay = options.restartDelay ?? 1000

  let child
  try {
    child = fork(scriptPath, [], { env, stdio })
  } catch (error) {
    console.error('[azify-logger] Falha ao iniciar worker Redis:', error)
    workerProcess = null
    return null
  }
  workerProcess = child

  // Install the parent-process hooks only once; previously every respawn
  // registered fresh once-handlers, leaking listeners on each restart.
  if (!global.__azifyLoggerWorkerHooks) {
    global.__azifyLoggerWorkerHooks = true
    const cleanup = () => {
      if (workerProcess && !workerProcess.killed) {
        workerProcess.kill()
        workerProcess = null
      }
    }
    process.once('exit', cleanup)
    process.once('SIGINT', () => {
      cleanup()
      process.exit(0)
    })
    process.once('SIGTERM', () => {
      cleanup()
      process.exit(0)
    })
  }

  if (stdio !== 'inherit') {
    child.on('message', (msg) => {
      if (msg && msg.type === 'azify-logger:ready') {
        console.log('[azify-logger] Worker Redis ativo')
      }
    })
  } else {
    console.log('[azify-logger] Worker Redis iniciado (PID', child.pid, ')')
  }

  child.on('exit', (code, signal) => {
    const reason = signal || code
    console.warn('[azify-logger] Worker Redis finalizado:', reason)
    // Only restart if this child is still the active worker. When
    // stopLoggerWorker() killed it, workerProcess was already cleared, so
    // an intentional stop no longer triggers an automatic respawn.
    const wasActive = workerProcess === child
    if (wasActive) {
      workerProcess = null
    }
    if (wasActive && autoRestart && !restarting && process.env.AZIFY_LOGGER_EMBEDDED_WORKER !== '0') {
      restarting = true
      setTimeout(() => {
        restarting = false
        spawnWorker(redisConfig, options)
      }, restartDelay)
    }
  })

  child.on('error', (error) => {
    console.error('[azify-logger] Erro no worker Redis:', error)
  })

  return child
}
|
|
83
|
+
|
|
84
|
+
/**
 * Starts the embedded worker unless it is explicitly disabled via
 * AZIFY_LOGGER_EMBEDDED_WORKER=0.
 *
 * @returns {object|null} the worker ChildProcess, or null when disabled
 */
function ensureWorker(redisConfig, options = {}) {
  const disabled = process.env.AZIFY_LOGGER_EMBEDDED_WORKER === '0'
  return disabled ? null : spawnWorker(redisConfig, options)
}
|
|
90
|
+
|
|
91
|
+
/**
 * Convenience entry point: forks the Redis worker using
 * `options.redisConfig` (if any) and forwards all remaining options.
 */
function startLoggerWorker(options = {}) {
  const redisConfig = options.redisConfig || {}
  return spawnWorker(redisConfig, options)
}
|
|
94
|
+
|
|
95
|
+
/**
 * Kills the running worker (if any) and clears the singleton handle.
 * No-op when no live worker exists.
 */
function stopLoggerWorker() {
  const worker = workerProcess
  if (!worker || worker.killed) {
    return
  }
  worker.kill()
  workerProcess = null
}
|
|
101
|
+
|
|
102
|
+
// Exposes the current worker ChildProcess (null when not running),
// mainly for diagnostics and tests.
function getWorkerProcess() {
  return workerProcess
}
|
|
105
|
+
|
|
106
|
+
// Public API for managing the embedded Redis worker process.
module.exports = {
  ensureWorker,
  startLoggerWorker,
  stopLoggerWorker,
  getWorkerProcess
}
|
package/register-otel.js
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
try {
|
|
2
2
|
const { NodeSDK } = require('@opentelemetry/sdk-node')
|
|
3
|
-
const {
|
|
3
|
+
const { HttpInstrumentation } = require('@opentelemetry/instrumentation-http')
|
|
4
|
+
const { ExpressInstrumentation } = require('@opentelemetry/instrumentation-express')
|
|
5
|
+
const { RestifyInstrumentation } = require('@opentelemetry/instrumentation-restify')
|
|
4
6
|
|
|
5
7
|
const serviceName = process.env.OTEL_SERVICE_NAME || process.env.APP_NAME || 'app'
|
|
6
8
|
const serviceVersion = process.env.OTEL_SERVICE_VERSION || '1.0.0'
|
|
@@ -8,22 +10,70 @@ try {
|
|
|
8
10
|
process.env.OTEL_SERVICE_NAME = serviceName
|
|
9
11
|
process.env.OTEL_SERVICE_VERSION = serviceVersion
|
|
10
12
|
|
|
13
|
+
let collectorHost = null
|
|
14
|
+
let collectorPath = null
|
|
15
|
+
try {
|
|
16
|
+
const target = new URL(process.env.AZIFY_LOGGER_URL || 'http://localhost:3001/log')
|
|
17
|
+
collectorHost = target.host
|
|
18
|
+
collectorPath = target.pathname || '/'
|
|
19
|
+
} catch (_) {}
|
|
20
|
+
|
|
21
|
+
const isLoggerRequest = (host, path) => {
|
|
22
|
+
if (!collectorHost || typeof host !== 'string') return false
|
|
23
|
+
if (host !== collectorHost) return false
|
|
24
|
+
return typeof path === 'string' && path.startsWith(collectorPath)
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
const httpInstrumentation = new HttpInstrumentation({
|
|
28
|
+
enabled: true,
|
|
29
|
+
requireParentforOutgoingSpans: false,
|
|
30
|
+
requireParentforIncomingSpans: false,
|
|
31
|
+
ignoreIncomingRequestHook (req) {
|
|
32
|
+
if (!collectorHost) return false
|
|
33
|
+
const host = req.headers?.host
|
|
34
|
+
const url = req.url
|
|
35
|
+
return typeof host === 'string' && isLoggerRequest(host, url)
|
|
36
|
+
},
|
|
37
|
+
ignoreOutgoingRequestHook (res) {
|
|
38
|
+
try {
|
|
39
|
+
if (!collectorHost) return false
|
|
40
|
+
const host = res?.host
|
|
41
|
+
const path = res?.path
|
|
42
|
+
return isLoggerRequest(host, path)
|
|
43
|
+
} catch (_) {
|
|
44
|
+
return false
|
|
45
|
+
}
|
|
46
|
+
},
|
|
47
|
+
requestHook (span, { options }) {
|
|
48
|
+
if (!options?.headers) return
|
|
49
|
+
const requestId = options.headers['x-request-id'] || options.headers['X-Request-ID']
|
|
50
|
+
if (requestId) {
|
|
51
|
+
span.setAttribute('azify.request_id', requestId)
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
})
|
|
55
|
+
|
|
56
|
+
const expressInstrumentation = new ExpressInstrumentation({
|
|
57
|
+
enabled: true,
|
|
58
|
+
ignoreLayersType: ['router', 'middleware']
|
|
59
|
+
})
|
|
60
|
+
|
|
61
|
+
const restifyInstrumentation = new RestifyInstrumentation({
|
|
62
|
+
enabled: true
|
|
63
|
+
})
|
|
64
|
+
|
|
11
65
|
const sdk = new NodeSDK({
|
|
12
|
-
serviceName
|
|
13
|
-
serviceVersion
|
|
14
|
-
instrumentations: [
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
},
|
|
20
|
-
'@opentelemetry/instrumentation-express': { enabled: true },
|
|
21
|
-
'@opentelemetry/instrumentation-restify': { enabled: true }
|
|
22
|
-
})]
|
|
66
|
+
serviceName,
|
|
67
|
+
serviceVersion,
|
|
68
|
+
instrumentations: [
|
|
69
|
+
httpInstrumentation,
|
|
70
|
+
expressInstrumentation,
|
|
71
|
+
restifyInstrumentation
|
|
72
|
+
]
|
|
23
73
|
})
|
|
24
74
|
|
|
25
75
|
sdk.start()
|
|
26
|
-
|
|
76
|
+
|
|
27
77
|
process.once('SIGTERM', () => {
|
|
28
78
|
sdk.shutdown()
|
|
29
79
|
.finally(() => process.exit(0))
|