azify-logger 1.0.37 → 1.0.39

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the respective public registries.
package/README.md CHANGED
@@ -72,6 +72,8 @@ await fastify.listen({ port: 3000 })
72
72
  |----------|------------------------------------|-----------|
73
73
  | `APP_NAME` | - | Nome da aplicação |
74
74
  | `AZIFY_LOGGER_URL` | `http://localhost:3001/log` | URL do logger |
75
+ | `AZIFY_LOGGER_REDIS_URL` | `redis://localhost:6381` | URL do Redis (fila de logs) |
76
+ | `AZIFY_LOGGER_REDIS_PASSWORD` | — | **Obrigatório só em produção.** Em `NODE_ENV=production` o Redis exige senha; em dev/staging é opcional. |
75
77
  | `OTEL_EXPORTER_OTLP_ENDPOINT` | `http://localhost:4318/v1/traces` | Endpoint OTLP para traces (opcional) |
76
78
  | `NODE_ENV` | `development` | Ambiente |
77
79
 
@@ -144,29 +144,14 @@ function createExpressLoggingMiddleware(options = {}) {
144
144
  return worker
145
145
  }
146
146
 
147
- async function sendLog(level, message, meta = {}) {
148
- if (!transport || typeof transport.enqueue !== 'function') {
149
- try {
150
- console.log(`[AZIFY-LOGGER-FALLBACK] ${message}`, JSON.stringify(meta, null, 2))
151
- } catch (_) {
152
- }
153
- return
154
- }
147
+ function sendLog(level, message, meta = {}) {
148
+ if (!transport || typeof transport.enqueue !== 'function') return
155
149
 
156
- try {
157
- // AGUARDAR o enqueue para garantir que o log seja adicionado ao Redis antes de continuar
158
- // transport.enqueue agora é assíncrono e sempre retorna uma Promise
159
- await transport.enqueue({
160
- level,
161
- message,
162
- meta
163
- }, { 'content-type': 'application/json' })
164
- } catch (err) {
165
- try {
166
- console.log(`[AZIFY-LOGGER-FALLBACK] ${message}`, JSON.stringify(meta, null, 2))
167
- } catch (_) {
168
- }
169
- }
150
+ transport.enqueue({
151
+ level,
152
+ message,
153
+ meta
154
+ }, { 'content-type': 'application/json' })
170
155
  }
171
156
 
172
157
  return function azifyExpressLoggingMiddleware(req, res, next) {
@@ -225,14 +210,14 @@ function createExpressLoggingMiddleware(options = {}) {
225
210
 
226
211
  const ctx = ensureRequestContext()
227
212
 
228
- const logWithContext = async (level, message, meta) => {
213
+ const logWithContext = (level, message, meta) => {
229
214
  const otelCtx = getOtelTraceContext()
230
215
  const ctx = getRequestContext() || ensureRequestContext()
231
216
 
232
217
  meta.traceId = otelCtx?.traceId || meta.traceId || ctx.traceId
233
218
  meta.spanId = otelCtx?.spanId || meta.spanId || ctx.spanId
234
219
  meta.parentSpanId = otelCtx?.parentSpanId || meta.parentSpanId || ctx.parentSpanId
235
- await sendLog(level, message, meta)
220
+ sendLog(level, message, meta)
236
221
  }
237
222
 
238
223
  function ensureIds() {
@@ -270,20 +255,19 @@ function createExpressLoggingMiddleware(options = {}) {
270
255
  }
271
256
  }
272
257
 
273
- async function emitResponseLog(meta, chunk) {
258
+ function emitResponseLog(meta, chunk) {
274
259
  if (!config.captureResponseBody || chunk == null) {
275
- await logWithContext('info', `[RESPONSE] ${method} ${url}`, meta)
260
+ logWithContext('info', `[RESPONSE] ${method} ${url}`, meta)
276
261
  return
277
262
  }
278
263
 
279
264
  if (!meta.response) meta.response = {}
280
265
  meta.response.body = safeSerializeBody(chunk)
281
- await logWithContext('info', `[RESPONSE] ${method} ${url}`, meta)
266
+ logWithContext('info', `[RESPONSE] ${method} ${url}`, meta)
282
267
  }
283
268
 
284
269
  if (config.logRequest) {
285
- let requestLogEmitted = false
286
- try {
270
+ process.nextTick(() => {
287
271
  ensureIds()
288
272
  ensureHeaders()
289
273
 
@@ -316,159 +300,94 @@ function createExpressLoggingMiddleware(options = {}) {
316
300
  if (config.environment) meta.environment = config.environment
317
301
 
318
302
  logWithContext('info', `[REQUEST] ${method} ${url}`, meta)
319
- requestLogEmitted = true
320
- } catch (err) {
321
- try {
322
- ensureIds()
323
- const minimalMeta = {
324
- traceId: reqCtx?.traceId || traceId,
325
- spanId: reqCtx?.spanId || spanId,
326
- parentSpanId: reqCtx?.parentSpanId || parentSpanId || null,
327
- requestId: requestId || fastUUID(),
328
- method,
329
- url,
330
- path,
331
- timestamp: Date.now(),
332
- hostname
333
- }
334
- if (serviceObj) minimalMeta.service = serviceObj
335
- if (config.environment) minimalMeta.environment = config.environment
336
- logWithContext('info', `[REQUEST] ${method} ${url}`, minimalMeta)
337
- requestLogEmitted = true
338
- } catch (_) {
339
- try {
340
- sendLog('info', `[REQUEST] ${method} ${url}`, {
341
- method,
342
- url,
343
- path
344
- })
345
- requestLogEmitted = true
346
- } catch (_) {
347
- try {
348
- console.log(`[AZIFY-LOGGER-EMERGENCY] [REQUEST] ${method} ${url}`)
349
- } catch (_) {
350
- }
351
- }
352
- }
353
- }
354
-
355
- if (!requestLogEmitted) {
356
- try {
357
- console.log(`[AZIFY-LOGGER-EMERGENCY] [REQUEST] ${method} ${url}`)
358
- } catch (_) {
359
- }
360
- }
303
+ })
361
304
  }
362
305
 
363
- async function emitResponse() {
364
- if (logSent) return
365
- logSent = true
306
+ const originalEnd = res.end.bind(res)
307
+
308
+ res.end = (chunk, encoding) => {
309
+ const result = originalEnd(chunk, encoding)
310
+
311
+ if (chunk != null && config.captureResponseBody && !responseChunkCaptured) {
312
+ responseChunk = chunk
313
+ responseChunkCaptured = true
314
+ }
366
315
 
367
- try {
368
- ensureIds()
369
-
370
- const statusCode = res.statusCode || 200
371
- const duration = Date.now() - startTime
372
- const response = { statusCode, durationMs: duration }
316
+ if (!logSent) {
317
+ logSent = true
373
318
 
374
- if (config.captureHeaders) {
375
- ensureHeaders()
376
- }
377
-
378
- const requestObj = {
379
- id: requestId,
380
- method,
381
- url,
382
- path,
383
- ip: clientIp
384
- }
385
- if (query) requestObj.query = query
386
- if (config.captureHeaders && cachedHeaders) requestObj.headers = cachedHeaders
387
-
388
- const meta = {
389
- traceId: reqCtx.traceId,
390
- spanId: reqCtx.spanId,
391
- parentSpanId: reqCtx.parentSpanId || null,
392
- requestId,
393
- request: requestObj,
394
- response,
395
- timestamp: Date.now(),
396
- hostname
397
- }
398
- if (serviceObj) meta.service = serviceObj
399
- if (config.environment) meta.environment = config.environment
400
-
401
- const chunkToProcess = (responseChunk !== null && responseChunkCaptured) ? responseChunk : null
402
- await emitResponseLog(meta, chunkToProcess)
403
- } catch (err) {
404
- try {
405
- ensureIds()
406
- const statusCode = res.statusCode || 200
407
- const duration = Date.now() - startTime
408
- const response = { statusCode, durationMs: duration }
409
- const requestObj = {
410
- id: requestId,
411
- method,
412
- url,
413
- path,
414
- ip: clientIp || 'unknown'
415
- }
416
- const meta = {
417
- traceId: reqCtx?.traceId || traceId,
418
- spanId: reqCtx?.spanId || spanId,
419
- parentSpanId: reqCtx?.parentSpanId || parentSpanId || null,
420
- requestId: requestId || fastUUID(),
421
- request: requestObj,
422
- response,
423
- timestamp: Date.now(),
424
- hostname
425
- }
426
- if (serviceObj) meta.service = serviceObj
427
- if (config.environment) meta.environment = config.environment
428
- await emitResponseLog(meta, null)
429
- } catch (fallbackErr) {
319
+ process.nextTick(() => {
430
320
  try {
431
321
  ensureIds()
322
+
432
323
  const statusCode = res.statusCode || 200
433
324
  const duration = Date.now() - startTime
434
- await sendLog('info', `[RESPONSE] ${method} ${url}`, {
435
- traceId: reqCtx?.traceId || traceId,
436
- spanId: reqCtx?.spanId || spanId,
437
- parentSpanId: reqCtx?.parentSpanId || parentSpanId || null,
438
- requestId: requestId || fastUUID(),
325
+ const response = { statusCode, durationMs: duration }
326
+
327
+ if (config.captureHeaders) {
328
+ ensureHeaders()
329
+ }
330
+
331
+ const requestObj = {
332
+ id: requestId,
439
333
  method,
440
334
  url,
441
335
  path,
442
- statusCode,
443
- durationMs: duration,
336
+ ip: clientIp
337
+ }
338
+ if (query) requestObj.query = query
339
+ if (config.captureHeaders && cachedHeaders) requestObj.headers = cachedHeaders
340
+
341
+ const meta = {
342
+ traceId: reqCtx.traceId,
343
+ spanId: reqCtx.spanId,
344
+ parentSpanId: reqCtx.parentSpanId || null,
345
+ requestId,
346
+ request: requestObj,
347
+ response,
444
348
  timestamp: Date.now(),
445
349
  hostname
446
- })
447
- } catch (_) {
350
+ }
351
+ if (serviceObj) meta.service = serviceObj
352
+ if (config.environment) meta.environment = config.environment
353
+
354
+ const chunkToProcess = (responseChunk !== null && responseChunkCaptured) ? responseChunk : null
355
+ emitResponseLog(meta, chunkToProcess)
356
+ } catch (err) {
357
+ try {
358
+ ensureIds()
359
+ const statusCode = res.statusCode || 200
360
+ const duration = Date.now() - startTime
361
+ const response = { statusCode, durationMs: duration }
362
+ const requestObj = {
363
+ id: requestId,
364
+ method,
365
+ url,
366
+ path,
367
+ ip: clientIp || 'unknown'
368
+ }
369
+ const meta = {
370
+ traceId: reqCtx?.traceId || traceId,
371
+ spanId: reqCtx?.spanId || spanId,
372
+ parentSpanId: reqCtx?.parentSpanId || parentSpanId || null,
373
+ requestId: requestId || fastUUID(),
374
+ request: requestObj,
375
+ response,
376
+ timestamp: Date.now(),
377
+ hostname
378
+ }
379
+ if (serviceObj) meta.service = serviceObj
380
+ if (config.environment) meta.environment = config.environment
381
+ emitResponseLog(meta, null)
382
+ } catch (_) {
383
+ }
448
384
  }
449
- }
450
- }
451
- }
452
-
453
- const originalEnd = res.end.bind(res)
454
-
455
- res.end = (chunk, encoding) => {
456
- if (chunk != null && config.captureResponseBody && !responseChunkCaptured) {
457
- responseChunk = chunk
458
- responseChunkCaptured = true
385
+ })
459
386
  }
460
387
 
461
- const result = originalEnd(chunk, encoding)
462
- void emitResponse()
463
388
  return result
464
389
  }
465
390
 
466
- // Também capturar o evento 'finish' para garantir que o RESPONSE seja logado
467
- // mesmo quando res.json() ou res.send() são usados sem chamar res.end() explicitamente
468
- res.once('finish', () => {
469
- void emitResponse()
470
- })
471
-
472
391
  runWithRequestContext(ctx, () => {
473
392
  next()
474
393
  })
@@ -1,5 +1,4 @@
1
1
  const { startRequestContext, runWithRequestContext, getRequestContext } = require('./store')
2
- const { createHttpLoggerTransport } = require('./streams/httpQueue')
3
2
  const os = require('os')
4
3
 
5
4
  let trace, otelContext
@@ -86,18 +85,20 @@ function createFastifyLoggingPlugin(options = {}) {
86
85
  captureHeaders: options.captureHeaders !== undefined ? options.captureHeaders : process.env.AZIFY_LOGGER_CAPTURE_HEADERS === 'true'
87
86
  }
88
87
 
89
- const transport = createHttpLoggerTransport(config.loggerUrl, {})
90
88
  const hostname = os.hostname()
91
89
  const serviceObj = config.serviceName ? { name: config.serviceName, version: '1.0.0' } : null
92
90
 
93
91
  function sendLog(level, message, meta = {}) {
94
- if (!transport || typeof transport.enqueue !== 'function') return
95
-
96
- transport.enqueue({
97
- level,
98
- message,
99
- meta
100
- }, { 'content-type': 'application/json' })
92
+ if (!config.loggerUrl) return
93
+ try {
94
+ const payload = { level, message, meta }
95
+ fetch(config.loggerUrl, {
96
+ method: 'POST',
97
+ headers: { 'Content-Type': 'application/json' },
98
+ body: JSON.stringify(payload),
99
+ signal: AbortSignal.timeout(3000)
100
+ }).catch(() => {})
101
+ } catch (_) {}
101
102
  }
102
103
 
103
104
  return async function azifyFastifyPlugin(fastify, opts) {
@@ -288,6 +289,7 @@ function createFastifyLoggingPlugin(options = {}) {
288
289
  const url = logger.url
289
290
  const path = logger.path
290
291
 
292
+ let reqCtx = null
291
293
  let requestId, traceId, spanId, parentSpanId, clientIp, query, cachedHeaders
292
294
  let idsCreated = false
293
295
  let headersCached = false
@@ -431,5 +433,8 @@ function createFastifyLoggingPlugin(options = {}) {
431
433
  }
432
434
  }
433
435
 
434
- module.exports = createFastifyLoggingPlugin
436
+ const fp = require('fastify-plugin')
437
+ module.exports = function (options) {
438
+ return fp(createFastifyLoggingPlugin(options))
439
+ }
435
440
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "azify-logger",
3
- "version": "1.0.37",
3
+ "version": "1.0.39",
4
4
  "description": "Azify Logger Client - Centralized logging for OpenSearch",
5
5
  "main": "index.js",
6
6
  "types": "index.d.ts",
@@ -53,8 +53,11 @@
53
53
  "adm-zip": "^0.5.16",
54
54
  "archiver": "^6.0.1",
55
55
  "axios": "^1.6.0",
56
+ "bull": "^4.16.5",
56
57
  "cors": "^2.8.5",
58
+ "dotenv": "^17.2.3",
57
59
  "express": "^4.18.2",
60
+ "fastify-plugin": "^5.0.0",
58
61
  "express-session": "^1.17.3",
59
62
  "ioredis": "^5.8.2",
60
63
  "js-yaml": "^4.1.0",
@@ -28,8 +28,17 @@ function createRedisProducer(config = {}) {
28
28
 
29
29
  const streamKey = config.streamKey || DEFAULT_STREAM_KEY
30
30
  const maxLen = Number.isFinite(config.maxLen) ? config.maxLen : DEFAULT_MAXLEN
31
+ const password = config.password ?? process.env.AZIFY_LOGGER_REDIS_PASSWORD
32
+ const pass = password != null && String(password).trim() !== '' ? String(password).trim() : null
33
+ const isProd = process.env.NODE_ENV === 'production'
34
+ if (!pass && isProd) {
35
+ throw new Error('Redis requires a password in production. Set AZIFY_LOGGER_REDIS_PASSWORD (or options.redis.password).')
36
+ }
37
+
31
38
  const redisOptions = { ...defaultRedisOptions, ...(config.redisOptions || {}) }
32
- const spool = config.spool || null
39
+ if (pass) {
40
+ redisOptions.password = pass
41
+ }
33
42
 
34
43
  const client = new Redis(url, redisOptions)
35
44
  let lastConnectionErrorLog = 0
@@ -42,14 +51,30 @@ function createRedisProducer(config = {}) {
42
51
  global.__azifyLoggerRedisErrorLastLog = 0
43
52
  }
44
53
 
54
+ function isRedisAuthError(err) {
55
+ if (!err) return false
56
+ const code = err.code || ''
57
+ const msg = (err.message || '').toLowerCase()
58
+ return code === 'NOAUTH' || code === 'WRONGPASS' || msg.includes('noauth') || msg.includes('wrongpass') || msg.includes('invalid password') || msg.includes('authentication failed')
59
+ }
60
+
61
+ function isRedisUnavailable(err) {
62
+ if (!err) return false
63
+ const code = err.code || ''
64
+ const msg = (err.message || '').toLowerCase()
65
+ if (code === 'ECONNREFUSED' || msg.includes('econnrefused') || msg.includes('redis')) return true
66
+ return isRedisAuthError(err)
67
+ }
68
+
45
69
  client.on('error', (err) => {
70
+ if (isRedisAuthError(err)) return
46
71
  const now = Date.now()
47
72
  if (!global.__azifyLoggerRedisErrorLogged && now - global.__azifyLoggerRedisErrorLastLog > ERROR_LOG_INTERVAL) {
48
- if (err && (err.code === 'ECONNREFUSED' || err.message?.includes('ECONNREFUSED') || err.message?.includes('Redis'))) {
73
+ if (isRedisUnavailable(err)) {
49
74
  global.__azifyLoggerRedisErrorLogged = true
50
75
  global.__azifyLoggerRedisErrorLastLog = now
51
76
  connectionErrorCount++
52
- process.stderr.write('[azify-logger] ⚠️ Redis indisponível. O sistema de logging está desabilitado. A aplicação continua funcionando normalmente.\n')
77
+ process.stderr.write('[azify-logger] ⚠️ Redis unavailable. Logging is disabled. The application continues to run normally.\n')
53
78
  lastConnectionErrorLog = now
54
79
  }
55
80
  }
@@ -70,9 +95,8 @@ function createRedisProducer(config = {}) {
70
95
  const BATCH_SIZE = 1
71
96
  const BATCH_TIMEOUT = 0
72
97
  let flushing = false
73
- let currentSpool = spool
74
98
 
75
- async function flushBatch() {
99
+ function flushBatch() {
76
100
  if (flushing || batch.length === 0) return
77
101
  flushing = true
78
102
 
@@ -82,19 +106,12 @@ function createRedisProducer(config = {}) {
82
106
  return
83
107
  }
84
108
 
85
- try {
109
+ setImmediate(() => {
86
110
  if (entriesToFlush.length === 1) {
87
111
  const entry = entriesToFlush[0]
88
112
  if (entry) {
89
- try {
90
- const payload = JSON.stringify(entry)
91
- // AGUARDAR o xadd para garantir que o log seja adicionado ao Redis antes de continuar
92
- await client.xadd(streamKey, 'MAXLEN', '~', maxLen, '*', 'entry', payload)
93
- } catch (err) {
94
- if (currentSpool) {
95
- currentSpool.append(entry).catch(() => {})
96
- }
97
- }
113
+ const payload = JSON.stringify(entry)
114
+ client.xadd(streamKey, 'MAXLEN', '~', maxLen, '*', 'entry', payload).catch(() => {})
98
115
  }
99
116
  } else {
100
117
  const pipeline = client.pipeline()
@@ -103,58 +120,40 @@ function createRedisProducer(config = {}) {
103
120
  for (let i = 0; i < entriesToFlush.length; i++) {
104
121
  const entry = entriesToFlush[i]
105
122
  if (!entry) continue
106
- try {
107
- const payload = JSON.stringify(entry)
108
- pipeline.xadd(streamKey, 'MAXLEN', '~', maxLen, '*', 'entry', payload)
109
- validCount++
110
- } catch (err) {
111
- if (currentSpool) {
112
- currentSpool.append(entry).catch(() => {})
113
- }
114
- }
123
+ const payload = JSON.stringify(entry)
124
+ pipeline.xadd(streamKey, 'MAXLEN', '~', maxLen, '*', 'entry', payload)
125
+ validCount++
115
126
  }
116
127
 
117
128
  if (validCount > 0) {
118
- await pipeline.exec().catch((err) => {
119
- for (const entry of entriesToFlush) {
120
- if (currentSpool) {
121
- currentSpool.append(entry).catch(() => {})
122
- }
123
- }
124
- })
129
+ pipeline.exec().catch(() => {})
125
130
  }
126
131
  }
127
- } catch (err) {
128
- for (const entry of entriesToFlush) {
129
- if (currentSpool) {
130
- currentSpool.append(entry).catch(() => {})
131
- }
132
+
133
+ flushing = false
134
+ if (batch.length >= BATCH_SIZE) {
135
+ flushBatch()
136
+ } else if (batch.length > 0) {
137
+ scheduleFlush()
132
138
  }
133
- }
134
-
135
- flushing = false
136
- if (batch.length >= BATCH_SIZE) {
137
- setImmediate(() => { void flushBatch() })
138
- } else if (batch.length > 0) {
139
- scheduleFlush()
140
- }
139
+ })
141
140
  }
142
141
 
143
142
  function scheduleFlush() {
144
143
  if (batchTimer || flushing) return
145
144
 
146
145
  if (batch.length >= BATCH_SIZE) {
147
- void flushBatch()
146
+ flushBatch()
148
147
  } else if (BATCH_TIMEOUT === 0) {
149
148
  setImmediate(() => {
150
149
  if (!flushing) {
151
- void flushBatch()
150
+ flushBatch()
152
151
  }
153
152
  })
154
153
  } else {
155
154
  batchTimer = setTimeout(() => {
156
155
  batchTimer = null
157
- void flushBatch()
156
+ flushBatch()
158
157
  }, BATCH_TIMEOUT)
159
158
 
160
159
  if (typeof batchTimer.unref === 'function') {
@@ -163,7 +162,7 @@ function createRedisProducer(config = {}) {
163
162
  }
164
163
  }
165
164
 
166
- async function enqueue(entry) {
165
+ function enqueue(entry) {
167
166
  batch.push(entry)
168
167
 
169
168
  if (batch.length >= BATCH_SIZE) {
@@ -171,8 +170,7 @@ function createRedisProducer(config = {}) {
171
170
  clearTimeout(batchTimer)
172
171
  batchTimer = null
173
172
  }
174
- // AGUARDAR o flushBatch quando o batch está cheio para garantir que o log seja adicionado ao Redis
175
- await flushBatch()
173
+ flushBatch()
176
174
  } else if (batch.length === 1) {
177
175
  scheduleFlush()
178
176
  }
@@ -184,7 +182,7 @@ function createRedisProducer(config = {}) {
184
182
  batchTimer = null
185
183
  }
186
184
  while (batch.length > 0) {
187
- await flushBatch()
185
+ flushBatch()
188
186
  await new Promise(resolve => setTimeout(resolve, 10))
189
187
  }
190
188
  await client.quit().catch(() => {})
@@ -195,11 +193,7 @@ function createRedisProducer(config = {}) {
195
193
  client,
196
194
  streamKey,
197
195
  close,
198
- flush: flushBatch,
199
- setSpool(newSpool) {
200
- currentSpool = newSpool
201
- },
202
- _spool: null
196
+ flush: flushBatch
203
197
  }
204
198
  }
205
199