azify-logger 1.0.28 → 1.0.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -12,14 +12,14 @@ const DEAD_LETTER_STREAM_KEY = process.env.AZIFY_LOGGER_REDIS_DLQ || `${STREAM_K
  const REDIS_URL = process.env.AZIFY_LOGGER_REDIS_URL || 'redis://localhost:6381'
  const WORKER_GROUP = process.env.AZIFY_LOGGER_REDIS_GROUP || 'azify-logger-workers'
  const CONSUMER_NAME = process.env.AZIFY_LOGGER_REDIS_CONSUMER || `${os.hostname()}-${process.pid}`
- const MAX_BATCH = Number(process.env.AZIFY_LOGGER_REDIS_BATCH || 100)
+ const MAX_BATCH = Number(process.env.AZIFY_LOGGER_REDIS_BATCH || 500)
  const BLOCK_MS = Number(process.env.AZIFY_LOGGER_REDIS_BLOCK || 5000)
  const MAX_DELIVERY_ATTEMPTS = Number(process.env.AZIFY_LOGGER_MAX_DELIVERY_ATTEMPTS || 10)
  const PENDING_IDLE_TIMEOUT = Number(process.env.AZIFY_LOGGER_PENDING_IDLE_TIMEOUT || 60000)

  const TRANSPORT_TIMEOUT = Number(process.env.AZIFY_LOGGER_HTTP_TIMEOUT || 250)
- const MAX_SOCKETS = Number(process.env.AZIFY_LOGGER_MAX_SOCKETS || 20)
- const WORKER_CONCURRENCY = Math.max(1, Number(process.env.AZIFY_LOGGER_WORKER_CONCURRENCY || 25))
+ const MAX_SOCKETS = Number(process.env.AZIFY_LOGGER_MAX_SOCKETS || 50)
+ const WORKER_CONCURRENCY = Math.max(1, Number(process.env.AZIFY_LOGGER_WORKER_CONCURRENCY || 100))
  const NO_GROUP_RETRY_DELAY = Number(process.env.AZIFY_LOGGER_WORKER_NOGROUP_DELAY || 250)

  const httpAgent = new http.Agent({ keepAlive: true, maxSockets: MAX_SOCKETS })
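
The new defaults trade memory and connection count for throughput: read batches grow from 100 to 500 entries, the keep-alive pool from 20 to 50 sockets, and worker concurrency from 25 to 100. All of them stay overridable. A minimal sketch of pinning the previous 1.0.28 profile, assuming the variables are read at module load as the constants above suggest:

    process.env.AZIFY_LOGGER_REDIS_BATCH = '100'
    process.env.AZIFY_LOGGER_MAX_SOCKETS = '20'
    process.env.AZIFY_LOGGER_WORKER_CONCURRENCY = '25'
    require('azify-logger') // must run after the variables are set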
@@ -46,9 +46,7 @@ let deliveries = 0
  let lastRedisErrorLog = 0
  let consecutiveNoGroupErrors = 0
  let redisErrorCount = 0
- const REDIS_ERROR_LOG_INTERVAL = 300000 // 5 minutes between logs (avoid repeated logs)
-
- // Use a global flag shared with redisQueue.js to guarantee only one log per process
+ const REDIS_ERROR_LOG_INTERVAL = 300000
  if (typeof global.__azifyLoggerRedisErrorLogged === 'undefined') {
    global.__azifyLoggerRedisErrorLogged = false
    global.__azifyLoggerRedisErrorLastLog = 0
@@ -56,23 +54,18 @@ if (typeof global.__azifyLoggerRedisErrorLogged === 'undefined') {

  const redis = new Redis(REDIS_URL, redisOptions)
  redis.on('error', (err) => {
-   // Log only once per whole process (shared with the producer)
-   // If the producer already logged it, do not log again
    const now = Date.now()
    if (!global.__azifyLoggerRedisErrorLogged && now - global.__azifyLoggerRedisErrorLastLog > REDIS_ERROR_LOG_INTERVAL) {
      if (err && (err.code === 'ECONNREFUSED' || err.message?.includes('ECONNREFUSED') || err.message?.includes('Redis'))) {
        global.__azifyLoggerRedisErrorLogged = true
        global.__azifyLoggerRedisErrorLastLog = now
        redisErrorCount++
-       // Clear message: the application keeps running, only logging is disabled
        process.stderr.write('[azify-logger] ⚠️ Redis indisponível. O sistema de logging está desabilitado. A aplicação continua funcionando normalmente.\n')
        lastRedisErrorLog = now
      }
    }
-   // After the first message, stop logging entirely
  })
  redis.on('connect', () => {
-   // Reset the counter on a successful connection (without logging, to avoid noise)
    if (redisErrorCount > 0 || global.__azifyLoggerRedisErrorLogged) {
      redisErrorCount = 0
      lastRedisErrorLog = 0
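
The `global.__azifyLoggerRedisErrorLogged` flag is shared between this worker and the producer side (redisQueue.js, per the removed comment), so a process emits the "Redis unavailable" warning at most once per interval regardless of which module sees the failure first. A generic sketch of the same pattern, with illustrative names that are not part of the package:

    // Warn at most once per process, even if several modules hit the same error.
    function warnOnce(flagKey, message) {
      if (!global[flagKey]) {
        global[flagKey] = true
        process.stderr.write(message + '\n')
      }
    }
    warnOnce('__myAppRedisWarned', '[my-app] Redis unavailable; logging disabled.')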
@@ -110,7 +103,6 @@ function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms))
  }

- // Sensitive headers that must be masked
  const SENSITIVE_HEADER_KEYS = new Set([
    'authorization',
    'cookie',
@@ -123,7 +115,6 @@ const SENSITIVE_HEADER_KEYS = new Set([
    'x-timestamp'
  ])

- // Sensitive body fields that must be masked
  const SENSITIVE_BODY_FIELDS = new Set([
    'password',
    'token',
@@ -142,7 +133,6 @@ const SENSITIVE_BODY_FIELDS = new Set([
    'cvc'
  ])

- // Function to sanitize headers
  function sanitizeHeaders(headers) {
    if (!headers || typeof headers !== 'object') {
      return {}
@@ -161,15 +151,12 @@ function sanitizeHeaders(headers) {
    return sanitized
  }

- // Function to sanitize the body (no truncation; keep the full size)
  function sanitizeBody(body) {
    if (!body || typeof body !== 'object') {
-     // If it is not an object, return it as-is (do not truncate strings)
      return body
    }

    try {
-     // Recursively sanitize sensitive fields (with no size limit)
      const sanitized = Array.isArray(body) ? [] : {}

      for (const key in body) {
@@ -177,25 +164,20 @@ function sanitizeBody(body) {
        const lower = String(key).toLowerCase()

        if (SENSITIVE_BODY_FIELDS.has(lower) || lower.includes('password') || lower.includes('secret')) {
-         // Mask sensitive fields
          sanitized[key] = '***'
        } else if (typeof body[key] === 'object' && body[key] !== null) {
-         // Recursively sanitize nested objects (with no depth limit)
          sanitized[key] = sanitizeBody(body[key])
        } else {
-         // Copy non-sensitive values (keeping the full size)
          sanitized[key] = body[key]
        }
      }

      return sanitized
    } catch (err) {
-     // If sanitization fails, return the original body (do not truncate)
      return body
    }
  }

- // Function to sanitize the full payload
  function sanitizePayload(payload) {
    if (!payload || typeof payload !== 'object') {
      return payload
@@ -203,29 +185,22 @@ function sanitizePayload(payload) {
    }

    const sanitized = { ...payload }

-   // Sanitize meta if present
    if (sanitized.meta && typeof sanitized.meta === 'object') {
-     // Sanitize request headers
      if (sanitized.meta.request && sanitized.meta.request.headers) {
        sanitized.meta.request.headers = sanitizeHeaders(sanitized.meta.request.headers)
      }

-     // Sanitize response headers
      if (sanitized.meta.response && sanitized.meta.response.headers) {
        sanitized.meta.response.headers = sanitizeHeaders(sanitized.meta.response.headers)
      }

-     // Sanitize the request body
      if (sanitized.meta.request && sanitized.meta.request.body) {
        sanitized.meta.request.body = sanitizeBody(sanitized.meta.request.body)
      }

-     // Sanitize the response body
      if (sanitized.meta.response && sanitized.meta.response.body) {
        sanitized.meta.response.body = sanitizeBody(sanitized.meta.response.body)
      }
-
-     // Sanitize HTTP client (interceptor) headers
      if (sanitized.meta.headers) {
        sanitized.meta.headers = sanitizeHeaders(sanitized.meta.headers)
      }
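
For reference, a sketch of what the sanitizers above produce; the sample entry is invented, and '***' is the mask sanitizeBody applies (the masking line inside sanitizeHeaders falls outside this diff):

    const entry = {
      meta: {
        request: {
          headers: { authorization: 'Bearer abc123', accept: 'application/json' },
          body: { username: 'alice', password: 'hunter2', card: { cvc: '123' } }
        }
      }
    }
    const safe = sanitizePayload(entry)
    // safe.meta.request.body.password -> '***'
    // safe.meta.request.body.card.cvc -> '***' (nested objects recurse through sanitizeBody)
    // safe.meta.request.headers       -> authorization masked via SENSITIVE_HEADER_KEYS
    // everything else passes through at full size; nothing is truncated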
@@ -249,19 +224,25 @@ async function deliver(entry) {
    return
  }

- // Sanitize the payload before sending
- const sanitizedPayload = entry.payload ? sanitizePayload(entry.payload) : entry.payload
+ let sanitizedPayload = entry.payload
+ if (entry.payload && typeof entry.payload === 'object') {
+   try {
+     sanitizedPayload = sanitizePayload(entry.payload)
+   } catch (err) {
+     sanitizedPayload = entry.payload
+   }
+ }

  await axios.post(target, sanitizedPayload, {
    headers: entry.headers || {},
    timeout: TRANSPORT_TIMEOUT,
    httpAgent,
    httpsAgent,
-   validateStatus: () => true
+   validateStatus: () => true,
+   maxRedirects: 0
  })

  deliveries += 1
- // Log removed to reduce noise in the logs
}

async function requeue(entry, attempts) {
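
Two delivery details above are worth noting: `validateStatus: () => true` makes axios resolve for every HTTP status, so only network failures and timeouts reject and reach the retry/DLQ path, and the new `maxRedirects: 0` keeps the hot path from following redirects. A sketch, with `target` and `payload` standing in for the real values:

    const res = await axios.post(target, payload, {
      timeout: 250,               // TRANSPORT_TIMEOUT default
      validateStatus: () => true, // 4xx/5xx resolve instead of throwing
      maxRedirects: 0             // a 3xx resolves as-is instead of being followed
    })
    if (res.status >= 400) {
      // axios resolved anyway; status handling is left to the caller
    }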
@@ -334,8 +315,6 @@ async function processEntry(raw) {
    await acknowledge(id)
    consecutiveNoGroupErrors = 0
  } catch (error) {
-   // Silence error logs; do not pollute the application logs
-   // Only send to the DLQ after max attempts, or requeue silently
    if (attempts + 1 >= MAX_DELIVERY_ATTEMPTS) {
      await acknowledge(id)
      await deadLetter(entry, error && error.message ? error.message : 'delivery-error')
@@ -351,24 +330,24 @@ async function processBatch(entries) {
    return
  }

- const executing = []
- for (const entry of entries) {
-   executing.push(processEntry(entry))
-   if (executing.length >= WORKER_CONCURRENCY) {
-     await Promise.allSettled(executing.splice(0, executing.length))
-   }
- }
-
- if (executing.length) {
-   await Promise.allSettled(executing)
+ const chunkSize = WORKER_CONCURRENCY
+ const chunks = []
+
+ for (let i = 0; i < entries.length; i += chunkSize) {
+   chunks.push(entries.slice(i, i + chunkSize))
  }
+
+ const chunkPromises = chunks.map(chunk => {
+   return Promise.allSettled(chunk.map(entry => processEntry(entry)))
+ })
+
+ await Promise.all(chunkPromises)
}
 
  async function consumeLoop() {
    let groupEnsured = false

    while (!stopRequested) {
-     // Ensure the group exists before trying to read
      if (!groupEnsured) {
        try {
          await ensureGroup()
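
One behavioral note on the processBatch rework above: `chunks.map(...)` runs its callback for every chunk synchronously, and each callback calls `processEntry` for each entry immediately, so all entries in the batch start at once and `await Promise.all(chunkPromises)` merely waits for them to settle. Unlike the removed sliding window, WORKER_CONCURRENCY now sets the chunk size for result grouping rather than a hard cap on in-flight deliveries. If a hard cap is the intent, awaiting chunks sequentially restores it; an editorial sketch, not the shipped code:

    for (const chunk of chunks) {
      // settle this chunk completely before starting the next one
      await Promise.allSettled(chunk.map(entry => processEntry(entry)))
    }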
@@ -392,8 +371,6 @@ async function consumeLoop() {

    let messages = null
    try {
-     // Use xreadgroup directly from ioredis
-     // Syntax: xreadgroup('GROUP', group, consumer, 'COUNT', count, 'BLOCK', block, 'STREAMS', key, id)
      messages = await redis.xreadgroup(
        'GROUP', WORKER_GROUP, CONSUMER_NAME,
        'COUNT', MAX_BATCH,
@@ -401,9 +378,7 @@ async function consumeLoop() {
        'STREAMS', STREAM_KEY, '>'
      )

-     // If messages come back, process them
      if (messages && Array.isArray(messages) && messages.length > 0) {
-       // XREADGROUP returns [[streamName, [entries]]]
        for (const streamData of messages) {
          if (Array.isArray(streamData) && streamData.length >= 2) {
            const entries = streamData[1]
@@ -416,12 +391,10 @@ async function consumeLoop() {
          continue
        }

-       // If there are no new messages, check the pending ones
        await claimPending()
      } catch (err) {
        const errMsg = err && err.message ? err.message : String(err)

-       // On a group or syntax error, recreate the group
        if (isNoGroupError(err) || errMsg.includes('syntax error') || errMsg.includes('NOGROUP')) {
          groupEnsured = false
          consecutiveNoGroupErrors += 1
@@ -432,7 +405,6 @@ async function consumeLoop() {
          continue
        }

-       // Other errors: log only every 5 seconds to avoid spam
        const now = Date.now()
        if (now - lastRedisErrorLog > 5000) {
          console.error('[azify-logger][worker] erro ao ler stream:', errMsg)
package/server.js CHANGED
@@ -120,7 +120,7 @@ async function ensureIndexTemplate() {
      mappings: {
        properties: {
          '@timestamp': { type: 'date' },
-         level: { type: 'keyword' },
+         level: { type: 'text', fields: { keyword: { type: 'keyword' } } },
          message: { type: 'text' },
          service: {
            properties: {
@@ -399,7 +399,7 @@ async function setupGrafanaForApp(appName) {
      esVersion: '2.11.1',
      version: '2.11.1',
      logMessageField: 'message',
-     logLevelField: 'level',
+     logLevelField: 'level.keyword',
      maxConcurrentShardRequests: 5,
      includeFrozen: false,
      xpack: false,
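
The template change makes `level` a full-text field with a `keyword` subfield, so free-text queries keep working while exact filtering and aggregation go through `level.keyword`; Grafana's `logLevelField` has to point at the subfield for level detection to stay exact. A sketch of the kind of query the multi-field enables (editorial; the index name is illustrative):

    await axios.post(`${osUrl}/logs-myapp/_search`, {
      query: { term: { 'level.keyword': 'error' } },
      aggs: { by_level: { terms: { field: 'level.keyword' } } }
    })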
@@ -481,6 +481,90 @@ async function setupGrafanaForApp(appName) {
    }
  }

+ // Grafana Tempo datasource for full tracing (optional; does not block org creation)
+ try {
+   const tempoDatasourceUid = `tempo-${appName.toLowerCase()}`
+   const tempoUrl = runningInDocker ? 'http://azify-tempo:3200' : 'http://localhost:3200'
+   const tempoDatasourceConfig = {
+     name: `Tempo-${appName}`,
+     type: 'tempo',
+     access: 'proxy',
+     url: tempoUrl,
+     uid: tempoDatasourceUid,
+     isDefault: false,
+     jsonData: {
+       httpMethod: 'GET',
+       tracesToLogs: {
+         datasourceUid: datasourceUid,
+         tags: ['job', 'service', 'pod'],
+         mappedTags: [{ key: 'service.name', value: 'service' }],
+         mapTagNamesEnabled: false,
+         spanStartTimeShift: '1h',
+         spanEndTimeShift: '1h',
+         filterByTraceID: false,
+         filterBySpanID: false
+       },
+       serviceMap: {
+         datasourceUid: datasourceUid
+       },
+       nodeGraph: {
+         enabled: true
+       },
+       search: {
+         hide: false
+       }
+     },
+     editable: true,
+     version: 1
+   }
+
+   try {
+     console.log(`[setupGrafana] Verificando datasource Tempo existente: ${tempoDatasourceUid} na org ${org.id}`)
+     const existingTempo = await axios.get(`${grafanaUrl}/api/datasources/uid/${tempoDatasourceUid}`, {
+       auth,
+       headers: { 'X-Grafana-Org-Id': org.id },
+       timeout: 3000
+     })
+     console.log(`[setupGrafana] Datasource Tempo existente encontrado: ${existingTempo.data?.id || 'N/A'}`)
+     try {
+       await axios.put(
+         `${grafanaUrl}/api/datasources/${existingTempo.data.id}`,
+         tempoDatasourceConfig,
+         {
+           auth,
+           headers: { 'X-Grafana-Org-Id': org.id },
+           timeout: 3000
+         }
+       )
+       console.log(`[setupGrafana] ✅ Datasource Tempo atualizado: ${tempoDatasourceUid}`)
+     } catch (updateTempoErr) {
+       console.error(`[setupGrafana] ⚠️ Erro ao atualizar datasource Tempo: ${updateTempoErr.response?.data?.message || updateTempoErr.message}`)
+     }
+   } catch (tempoError) {
+     console.log(`[setupGrafana] Erro ao verificar datasource Tempo: status=${tempoError.response?.status || 'N/A'}, message=${tempoError.response?.data?.message || tempoError.message}`)
+     if (tempoError.response?.status === 404) {
+       try {
+         const tempoCreate = await axios.post(
+           `${grafanaUrl}/api/datasources`,
+           tempoDatasourceConfig,
+           {
+             auth,
+             headers: { 'X-Grafana-Org-Id': org.id },
+             timeout: 3000
+           }
+         )
+         console.log(`[setupGrafana] ✅ Datasource Tempo criado: ${tempoDatasourceUid} (ID: ${tempoCreate.data?.datasource?.id || 'N/A'})`)
+       } catch (createTempoError) {
+         console.error(`[setupGrafana] ⚠️ Erro ao criar datasource Tempo via API: ${createTempoError.response?.data?.message || createTempoError.message}`)
+         console.log(`[setupGrafana] 💡 Datasource Tempo será criado manualmente ou na próxima tentativa`)
+       }
+     }
+   }
+ } catch (tempoGeneralError) {
+   console.error(`[setupGrafana] ⚠️ Erro geral ao configurar datasource Tempo: ${tempoGeneralError.message}`)
+   console.log(`[setupGrafana] 💡 Continuando sem datasource Tempo - não é crítico para funcionamento básico`)
+ }
+
  console.log(`[setupGrafana] 📈 Criando dashboard para ${appName}...`)

  const appNameLower = appName.toLowerCase()
@@ -505,7 +589,7 @@ async function setupGrafanaForApp(appName) {
    datasource: { uid: datasourceUid, type: 'grafana-opensearch-datasource' },
    targets: [{
      refId: 'A',
-     query: `message:"[REQUEST]" AND NOT message:"[RESPONSE]" AND ${indexFilter}`,
+     query: `message:"[RESPONSE]" AND ${indexFilter}`,
      bucketAggs: [{
        id: '2',
        type: 'date_histogram',
@@ -958,55 +1042,37 @@ async function handleLog(req, res) {
  }

  const truncateBody = (bodyValue, forResponse = false) => {
-   if (forResponse && typeof bodyValue === 'string') {
-     if (bodyValue.length > 10000) {
-       return bodyValue.substring(0, 10000) + '... [truncated]'
+   if (forResponse) {
+     if (typeof bodyValue === 'string') {
+       return bodyValue
+     } else if (Buffer.isBuffer(bodyValue)) {
+       return bodyValue.toString('utf8')
+     } else if (typeof bodyValue === 'object' && bodyValue !== null) {
+       try {
+         return JSON.stringify(bodyValue)
+       } catch (_) {
+         return String(bodyValue)
+       }
      }
-     return bodyValue
+     return String(bodyValue)
    }

    if (typeof bodyValue === 'string') {
-     if (!forResponse && (bodyValue.trim().startsWith('{') || bodyValue.trim().startsWith('['))) {
+     if (bodyValue.trim().startsWith('{') || bodyValue.trim().startsWith('[')) {
        try {
          let parsed = JSON.parse(bodyValue)
          if (typeof parsed === 'object') {
-           let serialized = JSON.stringify(parsed)
-           if (serialized.length > 10000) {
-             return bodyValue.substring(0, 10000) + '... [truncated]'
-           }
            return parsed
          }
        } catch (_) { }
      }
-     if (bodyValue.length > 10000) {
-       return bodyValue.substring(0, 10000) + '... [truncated]'
-     }
      return bodyValue
    } else if (typeof bodyValue === 'object' && bodyValue !== null) {
-     try {
-       let serialized = JSON.stringify(bodyValue)
-       if (serialized.length > 10000 || forResponse) {
-         return serialized.substring(0, 10000) + '... [truncated]'
-       }
-       return bodyValue
-     } catch (e) {
-       try {
-         let str = String(bodyValue)
-         if (str.length > 10000) {
-           return str.substring(0, 10000) + '... [truncated]'
-         }
-         return str
-       } catch (_) {
-         return '[Unable to serialize body]'
-       }
-     }
+     return bodyValue
    } else if (Buffer.isBuffer(bodyValue)) {
      try {
        let str = bodyValue.toString('utf8')
-       if (str.length > 10000) {
-         return str.substring(0, 10000) + '... [truncated]'
-       }
-       if (!forResponse && (str.trim().startsWith('{') || str.trim().startsWith('['))) {
+       if (str.trim().startsWith('{') || str.trim().startsWith('[')) {
          try {
            return JSON.parse(str)
          } catch (_) {
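
Net effect of this hunk: the 10,000-character cap and the '... [truncated]' marker are gone, bodies flow through at full size, and response bodies are always reduced to a string. An editorial before/after sketch with invented inputs:

    // 1.0.28: truncateBody(longString, true) -> first 10,000 chars + '... [truncated]'
    // 1.0.30: truncateBody(longString, true) -> longString, unchanged
    // 1.0.30: truncateBody({ a: 1 }, true)   -> '{"a":1}' (response bodies are stringified)
    // 1.0.30: truncateBody('{"a":1}', false) -> { a: 1 } (JSON-looking strings are parsed)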
@@ -1029,14 +1095,33 @@ async function handleLog(req, res) {
    } else {
      value.body = processedBody
    }
- } else if (key === 'response' && value && typeof value === 'object' && value.body !== undefined) {
-   const processedBody = truncateBody(value.body, true)
-   const bodyString = typeof processedBody === 'string' ? processedBody : JSON.stringify(processedBody)
-   logEntry.responseBody = bodyString
-   const { body, ...responseWithoutBody } = value
-   logEntry[key] = responseWithoutBody
+ } else if (key === 'response' && value && typeof value === 'object') {
+   if (value.statusCode != null) {
+     logEntry.statusCode = value.statusCode
+   }
+   if (value.durationMs != null) {
+     logEntry.responseTime = value.durationMs
+   }
+   if (value.body !== undefined) {
+     const processedBody = truncateBody(value.body, true)
+     let bodyString
+     if (typeof processedBody === 'string') {
+       bodyString = processedBody
+     } else {
+       bodyString = JSON.stringify(processedBody)
+     }
+     logEntry.responseBody = bodyString
+     const { body, ...responseWithoutBody } = value
+     logEntry[key] = responseWithoutBody
+   } else {
+     logEntry[key] = value
+   }
  } else if (key === 'responseBody' || key === 'requestBody') {
-   value = truncateBody(value)
+   if (key === 'responseBody') {
+     value = truncateBody(value, true)
+   } else {
+     value = truncateBody(value, false)
+   }
    logEntry[key] = value
  } else {
    logEntry[key] = value
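
The response branch now also promotes status and timing onto the indexed document. An editorial sketch of input and output:

    // incoming meta field:
    //   { response: { statusCode: 200, durationMs: 12, body: { ok: true } } }
    // resulting fields on the indexed logEntry:
    //   logEntry.statusCode   = 200
    //   logEntry.responseTime = 12
    //   logEntry.responseBody = '{"ok":true}'
    //   logEntry.response     = { statusCode: 200, durationMs: 12 } // body stripped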
@@ -1053,10 +1138,6 @@ async function handleLog(req, res) {
  const serviceName = appName.toLowerCase().replace(/[^a-z0-9-]/g, '-')
  const indexName = `logs-${serviceName}`

- if (logEntry.responseBody && typeof logEntry.responseBody === 'string' && logEntry.responseBody.length > 10000) {
-   logEntry.responseBody = logEntry.responseBody.substring(0, 10000) + '... [truncated]'
- }
-
  await axios.post(`${osUrl}/${indexName}/_doc`, logEntry, {
    headers: { 'Content-Type': 'application/json' }
  })
package/store.js CHANGED
@@ -2,8 +2,14 @@ const { AsyncLocalStorage } = require('async_hooks')

  const als = new AsyncLocalStorage()

- function generateId(bytes = 16) {
-   return require('crypto').randomBytes(bytes).toString('hex')
+ function fastGenerateId(length = 16) {
+   const chars = '0123456789abcdef'
+   let result = ''
+   const totalChars = length * 2
+   for (let i = 0; i < totalChars; i++) {
+     result += chars[Math.floor(Math.random() * chars.length)]
+   }
+   return result
  }

  function toTraceId(hex32) {
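
`fastGenerateId` trades `crypto.randomBytes` for `Math.random()`: faster and syscall-free, but not cryptographically random, so trace and span IDs become more predictable and collision-prone. If that matters, a refillable byte pool keeps crypto-quality randomness at a similar per-ID cost; an editorial sketch, not shipped code:

    const crypto = require('crypto')
    let pool = crypto.randomBytes(4096)
    let offset = 0
    function pooledId(bytes = 16) {
      // refill the pool once it is exhausted
      if (offset + bytes > pool.length) {
        pool = crypto.randomBytes(4096)
        offset = 0
      }
      const hex = pool.toString('hex', offset, offset + bytes)
      offset += bytes
      return hex
    }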
@@ -12,8 +18,8 @@ function toTraceId(hex32) {
  }

  function startRequestContext(initial = {}) {
-   const traceHex = initial.traceHex || generateId(16)
-   const spanHex = initial.spanHex || generateId(8)
+   const traceHex = initial.traceHex || fastGenerateId(16)
+   const spanHex = initial.spanHex || fastGenerateId(8)
    const ctx = {
      traceId: toTraceId(traceHex),
      spanId: spanHex,