llmflow 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/logger.js ADDED
@@ -0,0 +1,122 @@
1
/**
 * Colored console logger for LLMFlow
 * Clean, compact output with optional verbose mode
 */

// Verbose mode is opt-in via VERBOSE=1 in the environment or a --verbose CLI
// flag. Evaluated once at module load; changing the env afterwards has no effect.
const VERBOSE = process.env.VERBOSE === '1' || process.argv.includes('--verbose');
7
+
8
// ANSI SGR escape codes used to colorize terminal output.
// Every colored segment must be terminated with `c.reset`.
const c = {
  reset: '\x1b[0m',
  dim: '\x1b[2m',
  bold: '\x1b[1m',
  green: '\x1b[32m',
  red: '\x1b[31m',
  yellow: '\x1b[33m',
  cyan: '\x1b[36m',
  magenta: '\x1b[35m',
  blue: '\x1b[34m',
};
19
+
20
/**
 * Current wall-clock time as "HH:MM:SS.mmm" (UTC).
 * @returns {string} the time-of-day portion of an ISO-8601 timestamp
 */
function timestamp() {
  const iso = new Date().toISOString();
  return iso.slice(11, 23);
}
23
+
24
/**
 * Human-readable duration: "NNNms" under one second, "N.Ns" otherwise.
 * @param {number} ms - elapsed time in milliseconds
 * @returns {string}
 */
function formatDuration(ms) {
  if (ms >= 1000) {
    const seconds = ms / 1000;
    return `${seconds.toFixed(1)}s`;
  }
  return `${ms}ms`;
}
28
+
29
/**
 * Compact token count: "" when absent/zero, "N.Nk" at 1000+, otherwise the
 * plain number as a string.
 * @param {number|undefined} tokens
 * @returns {string}
 */
function formatTokens(tokens) {
  if (!tokens) return '';
  return tokens >= 1000 ? `${(tokens / 1000).toFixed(1)}k` : String(tokens);
}
34
+
35
/**
 * Shared logger instance for LLMFlow.
 *
 * Plain status lines go to stdout; `warn`/`error` now go to stderr so they
 * can be separated when piping output. Request/response helpers emit one
 * compact line each; several methods are no-ops unless verbose mode is on.
 */
const logger = {
  // Startup messages (cyan tag so they stand out on boot).
  startup(message) {
    console.log(`${c.cyan}[llmflow]${c.reset} ${message}`);
  },

  info(message) {
    console.log(`${c.dim}[llmflow]${c.reset} ${message}`);
  },

  // Fix: warnings are routed to stderr via console.warn (was console.log).
  warn(message) {
    console.warn(`${c.yellow}[llmflow]${c.reset} ${message}`);
  },

  // Fix: errors are routed to stderr via console.error (was console.log).
  error(message) {
    console.error(`${c.red}[llmflow]${c.reset} ${message}`);
  },

  // Request logging - compact, only emitted in verbose mode.
  request(method, path, traceId) {
    if (VERBOSE) {
      // Fix: tolerate a missing traceId instead of throwing on .slice.
      const shortId = String(traceId || '').slice(0, 8);
      console.log(`${c.dim}${timestamp()}${c.reset} ${c.blue}>>>${c.reset} ${method} ${path} ${c.dim}${shortId}${c.reset}`);
    }
  },

  // Response logging - always shown but compact
  response(opts) {
    const { method, path, status, duration, model, tokens, streaming, traceId } = opts;

    const statusColor = status < 400 ? c.green : c.red;
    const statusText = `${statusColor}${status}${c.reset}`;
    const durationText = `${c.dim}${formatDuration(duration)}${c.reset}`;

    let details = '';
    if (model) details += ` ${c.cyan}${model}${c.reset}`;
    if (tokens) details += ` ${c.dim}${formatTokens(tokens)} tok${c.reset}`;
    if (streaming) details += ` ${c.magenta}stream${c.reset}`;

    if (VERBOSE) {
      // Fix: tolerate a missing traceId instead of throwing on .slice.
      const shortId = String(traceId || '').slice(0, 8);
      console.log(`${c.dim}${timestamp()}${c.reset} ${c.green}<<<${c.reset} ${statusText} ${durationText}${details} ${c.dim}${shortId}${c.reset}`);
    } else {
      // Compact: single line with key info; long paths keep their tail.
      const shortPath = path.length > 20 ? '...' + path.slice(-17) : path;
      console.log(`${c.dim}${timestamp()}${c.reset} ${method} ${shortPath.padEnd(20)} ${statusText} ${durationText.padStart(8)}${details}`);
    }
  },

  // API proxy specific one-line summary (falsy fields are omitted).
  proxy(opts) {
    const { model, tokens, cost, duration, streaming, error } = opts;

    if (error) {
      console.log(`${c.dim}${timestamp()}${c.reset} ${c.red}ERR${c.reset} ${error.slice(0, 50)}`);
      return;
    }

    const parts = [
      `${c.dim}${timestamp()}${c.reset}`,
      streaming ? `${c.magenta}STREAM${c.reset}` : `${c.green}OK${c.reset}`,
      model ? `${c.cyan}${model}${c.reset}` : '',
      tokens ? `${formatTokens(tokens)} tok` : '',
      cost ? `$${cost.toFixed(4)}` : '',
      `${c.dim}${formatDuration(duration)}${c.reset}`,
    ].filter(Boolean);

    console.log(parts.join(' '));
  },

  // Dashboard API requests (verbose only).
  dashboard(method, path, duration) {
    if (VERBOSE) {
      console.log(`${c.dim}${timestamp()} ${method} ${path} ${formatDuration(duration)}${c.reset}`);
    }
  },

  // Verbose-only debug output.
  debug(message) {
    if (VERBOSE) {
      console.log(`${c.dim}${timestamp()} ${message}${c.reset}`);
    }
  },

  isVerbose() {
    return VERBOSE;
  }
};

module.exports = logger;
package/otlp-export.js ADDED
@@ -0,0 +1,564 @@
1
+ /**
2
+ * OTLP Export Module for LLMFlow
3
+ *
4
+ * Exports traces, logs, and metrics to external observability backends
5
+ * via OpenTelemetry Protocol (OTLP/HTTP JSON).
6
+ *
7
+ * Supported backends:
8
+ * - Jaeger (OTLP)
9
+ * - Phoenix/Arize (OTLP)
10
+ * - Langfuse (OTLP)
11
+ * - Opik/Comet (OTLP)
12
+ * - Grafana Tempo (OTLP)
13
+ * - Any OTLP-compatible backend
14
+ */
15
+
16
+ const https = require('https');
17
+ const http = require('http');
18
+ const log = require('./logger');
19
+
20
// Per-signal OTLP endpoints, read once at module load.
// OTLP_EXPORT_ENDPOINT is a shorthand alias that applies to traces only.
const EXPORT_ENDPOINTS = {
  traces: process.env.OTLP_EXPORT_TRACES_ENDPOINT || process.env.OTLP_EXPORT_ENDPOINT,
  logs: process.env.OTLP_EXPORT_LOGS_ENDPOINT,
  metrics: process.env.OTLP_EXPORT_METRICS_ENDPOINT
};

// Extra HTTP headers for every export request ("Key=Value,Key2=Value2").
// parseHeaders is a hoisted function declaration defined below, so calling it
// here at module load is legal.
const EXPORT_HEADERS = parseHeaders(process.env.OTLP_EXPORT_HEADERS || '');
// Export turns on when explicitly enabled or when a traces endpoint is set.
const EXPORT_ENABLED = process.env.OTLP_EXPORT_ENABLED === 'true' || !!EXPORT_ENDPOINTS.traces;
// Queue size that triggers an immediate flush (default 100 items).
const BATCH_SIZE = parseInt(process.env.OTLP_EXPORT_BATCH_SIZE || '100', 10);
// Periodic flush interval in milliseconds (default 5s).
const FLUSH_INTERVAL_MS = parseInt(process.env.OTLP_EXPORT_FLUSH_INTERVAL || '5000', 10);

// In-memory pending batches, drained by the flush* functions below.
let traceBatch = [];
let logBatch = [];
let metricBatch = [];
// Single shared timer handle for the periodic flush; null when none is armed.
let flushTimer = null;
35
+
36
+ /**
37
+ * Parse headers from comma-separated key=value format
38
+ * Example: "Authorization=Bearer xxx,X-Custom=value"
39
+ */
40
/**
 * Parse headers from comma-separated "Key=Value" pairs, e.g.
 * "Authorization=Bearer xxx,X-Custom=value".
 *
 * Only the first '=' splits key from value, so values may themselves contain
 * '='. Segments without a key or without any '=' are skipped; keys and values
 * are trimmed. Note: a value cannot contain a literal ',' in this format.
 *
 * @param {string} headerStr
 * @returns {Record<string, string>}
 */
function parseHeaders(headerStr) {
  if (!headerStr) return {};
  const headers = {};
  for (const pair of headerStr.split(',')) {
    const sep = pair.indexOf('=');
    if (sep > 0) {
      headers[pair.slice(0, sep).trim()] = pair.slice(sep + 1).trim();
    }
  }
  return headers;
}
51
+
52
+ /**
53
+ * Convert LLMFlow trace to OTLP span format
54
+ */
55
/**
 * Convert an LLMFlow trace row to an OTLP span (OTLP/JSON encoding).
 *
 * Input timestamps are milliseconds; OTLP wants unix nanoseconds encoded as
 * decimal strings. Missing fields fall back to neutral defaults so a sparse
 * row still produces a valid span.
 *
 * @param {object} trace - stored trace row
 * @returns {object} OTLP span object
 */
function traceToOtlpSpan(trace) {
  // Fix: guard the BigInt conversions. BigInt(undefined) or BigInt(1.5)
  // throws, and a single bad row would reject the whole export batch.
  const msToNano = (ms) => BigInt(Math.round(Number(ms) || 0)) * 1000000n;
  const startTimeNano = msToNano(trace.timestamp);
  const endTimeNano = startTimeNano + msToNano(trace.duration_ms);

  // gen_ai.* keys follow the OpenTelemetry generative-AI semantic conventions.
  const attributes = [
    { key: 'gen_ai.system', value: { stringValue: trace.provider || 'unknown' } },
    { key: 'gen_ai.request.model', value: { stringValue: trace.model || 'unknown' } },
    // OTLP/JSON encodes 64-bit integers as strings.
    { key: 'gen_ai.usage.prompt_tokens', value: { intValue: String(trace.prompt_tokens || 0) } },
    { key: 'gen_ai.usage.completion_tokens', value: { intValue: String(trace.completion_tokens || 0) } },
    { key: 'gen_ai.usage.total_tokens', value: { intValue: String(trace.total_tokens || 0) } },
    { key: 'llmflow.cost', value: { doubleValue: trace.estimated_cost || 0 } },
    { key: 'llmflow.span_type', value: { stringValue: trace.span_type || 'llm' } }
  ];

  if (trace.span_name) {
    attributes.push({ key: 'llmflow.span_name', value: { stringValue: trace.span_name } });
  }

  if (trace.service_name) {
    attributes.push({ key: 'service.name', value: { stringValue: trace.service_name } });
  }

  // HTTP-style status >= 400 maps to STATUS_CODE_ERROR (2), otherwise OK (1).
  const statusCode = (trace.status && trace.status >= 400) ? 2 : 1;

  return {
    traceId: normalizeTraceId(trace.trace_id || trace.id),
    spanId: normalizeSpanId(trace.id),
    parentSpanId: trace.parent_id ? normalizeSpanId(trace.parent_id) : undefined,
    name: trace.span_name || trace.model || 'llm.request',
    kind: 3, // SPAN_KIND_CLIENT
    startTimeUnixNano: startTimeNano.toString(),
    endTimeUnixNano: endTimeNano.toString(),
    attributes,
    status: {
      code: statusCode,
      message: trace.error || undefined
    }
  };
}
95
+
96
+ /**
97
+ * Normalize trace ID to 32 hex characters
98
+ */
99
/**
 * Coerce an arbitrary id (e.g. a dashed UUID) into a 32-hex-char OTLP trace
 * id: dashes stripped, lowercased, truncated to 32 chars or left-padded with
 * zeros. Falsy input yields the all-zero id.
 */
function normalizeTraceId(id) {
  if (!id) return '0'.repeat(32);
  const hex = id.replace(/-/g, '').toLowerCase();
  return hex.length >= 32 ? hex.slice(0, 32) : hex.padStart(32, '0');
}
105
+
106
+ /**
107
+ * Normalize span ID to 16 hex characters
108
+ */
109
/**
 * Coerce an arbitrary id into a 16-hex-char OTLP span id: dashes stripped,
 * lowercased, truncated to 16 chars or left-padded with zeros. Falsy input
 * yields the all-zero id.
 */
function normalizeSpanId(id) {
  if (!id) return '0'.repeat(16);
  const hex = id.replace(/-/g, '').toLowerCase();
  return hex.length >= 16 ? hex.slice(0, 16) : hex.padStart(16, '0');
}
115
+
116
+ /**
117
+ * Convert LLMFlow log to OTLP log record format
118
+ */
119
/**
 * Convert an LLMFlow log row to an OTLP log record (OTLP/JSON encoding).
 *
 * Timestamps are milliseconds in, unix-nano decimal strings out. Attribute
 * values are kept only when they are string/number/boolean; anything else is
 * dropped. Severity defaults to INFO (9).
 *
 * @param {object} logEntry - stored log row
 * @returns {object} OTLP LogRecord object
 */
function logToOtlpRecord(logEntry) {
  // Fix: guard the BigInt conversions. BigInt(undefined) or BigInt(1.5)
  // throws, and a single bad row would reject the whole export batch.
  const msToNano = (ms) => (BigInt(Math.round(Number(ms) || 0)) * 1000000n).toString();
  const timeNano = msToNano(logEntry.timestamp);

  const attributes = [];
  if (logEntry.event_name) {
    attributes.push({ key: 'event.name', value: { stringValue: logEntry.event_name } });
  }

  // Only primitive attribute values survive; nested objects/arrays are dropped.
  if (logEntry.attributes && typeof logEntry.attributes === 'object') {
    Object.entries(logEntry.attributes).forEach(([key, value]) => {
      if (typeof value === 'string') {
        attributes.push({ key, value: { stringValue: value } });
      } else if (typeof value === 'number') {
        attributes.push({ key, value: { doubleValue: value } });
      } else if (typeof value === 'boolean') {
        attributes.push({ key, value: { boolValue: value } });
      }
    });
  }

  return {
    timeUnixNano: timeNano,
    // Fall back to the event time when no observed time was recorded.
    observedTimeUnixNano: logEntry.observed_timestamp
      ? msToNano(logEntry.observed_timestamp)
      : timeNano,
    severityNumber: logEntry.severity_number || 9, // INFO
    severityText: logEntry.severity_text || 'INFO',
    body: logEntry.body ? { stringValue: logEntry.body } : undefined,
    attributes,
    traceId: logEntry.trace_id ? normalizeTraceId(logEntry.trace_id) : undefined,
    spanId: logEntry.span_id ? normalizeSpanId(logEntry.span_id) : undefined
  };
}
152
+
153
+ /**
154
+ * Convert LLMFlow metric to OTLP metric format
155
+ */
156
/**
 * Convert an LLMFlow metric row to an OTLP metric (OTLP/JSON encoding).
 *
 * Supported metric_type values: 'sum' (monotonic, cumulative), 'histogram',
 * and 'gauge' (the default for anything else). Integer values win over
 * doubles when both are set.
 *
 * @param {object} metric - stored metric row
 * @returns {object} OTLP Metric object
 */
function metricToOtlpMetric(metric) {
  // Fix: guard the BigInt conversion. BigInt(undefined) or BigInt(1.5)
  // throws, and a single bad row would reject the whole export batch.
  const timeNano = BigInt(Math.round(Number(metric.timestamp) || 0)) * 1000000n;

  // Only string/number attribute values are kept; others are dropped.
  const attributes = [];
  if (metric.attributes && typeof metric.attributes === 'object') {
    Object.entries(metric.attributes).forEach(([key, value]) => {
      if (typeof value === 'string') {
        attributes.push({ key, value: { stringValue: value } });
      } else if (typeof value === 'number') {
        attributes.push({ key, value: { doubleValue: value } });
      }
    });
  }

  const dataPoint = {
    attributes,
    timeUnixNano: timeNano.toString()
  };

  // OTLP/JSON encodes asInt as a decimal string; asDouble stays numeric.
  if (metric.value_int !== null && metric.value_int !== undefined) {
    dataPoint.asInt = String(metric.value_int);
  } else if (metric.value_double !== null && metric.value_double !== undefined) {
    dataPoint.asDouble = metric.value_double;
  }

  const metricData = {
    name: metric.name,
    description: metric.description || '',
    unit: metric.unit || ''
  };

  switch (metric.metric_type) {
    case 'sum':
      metricData.sum = {
        dataPoints: [dataPoint],
        aggregationTemporality: 2, // AGGREGATION_TEMPORALITY_CUMULATIVE
        isMonotonic: true
      };
      break;
    case 'histogram':
      metricData.histogram = {
        dataPoints: [{
          ...dataPoint,
          count: metric.histogram_data?.count || 0,
          sum: metric.histogram_data?.sum || 0,
          bucketCounts: metric.histogram_data?.bucketCounts || [],
          explicitBounds: metric.histogram_data?.explicitBounds || []
        }],
        aggregationTemporality: 2
      };
      break;
    case 'gauge':
    default:
      metricData.gauge = {
        dataPoints: [dataPoint]
      };
  }

  return metricData;
}
216
+
217
+ /**
218
+ * Build OTLP export payload for traces
219
+ */
220
/**
 * Wrap spans in the OTLP ExportTraceServiceRequest envelope with resource
 * and scope metadata.
 *
 * @param {object[]} traces - LLMFlow trace rows
 * @param {string} [serviceName='llmflow'] - resource service.name
 * @returns {object} OTLP traces payload
 */
function buildTracesPayload(traces, serviceName = 'llmflow') {
  const version = process.env.npm_package_version || '0.3.0';
  return {
    resourceSpans: [{
      resource: {
        attributes: [
          { key: 'service.name', value: { stringValue: serviceName } },
          { key: 'service.version', value: { stringValue: version } },
          { key: 'telemetry.sdk.name', value: { stringValue: 'llmflow' } }
        ]
      },
      scopeSpans: [{
        scope: {
          name: 'llmflow',
          version
        },
        spans: traces.map(traceToOtlpSpan)
      }]
    }]
  };
}
242
+
243
+ /**
244
+ * Build OTLP export payload for logs
245
+ */
246
/**
 * Wrap log records in the OTLP ExportLogsServiceRequest envelope.
 *
 * @param {object[]} logs - LLMFlow log rows
 * @param {string} [serviceName='llmflow'] - resource service.name
 * @returns {object} OTLP logs payload
 */
function buildLogsPayload(logs, serviceName = 'llmflow') {
  return {
    resourceLogs: [{
      resource: {
        attributes: [{ key: 'service.name', value: { stringValue: serviceName } }]
      },
      scopeLogs: [{
        scope: { name: 'llmflow' },
        logRecords: logs.map(logToOtlpRecord)
      }]
    }]
  };
}
265
+
266
+ /**
267
+ * Build OTLP export payload for metrics
268
+ */
269
/**
 * Wrap metrics in the OTLP ExportMetricsServiceRequest envelope.
 *
 * @param {object[]} metrics - LLMFlow metric rows
 * @param {string} [serviceName='llmflow'] - resource service.name
 * @returns {object} OTLP metrics payload
 */
function buildMetricsPayload(metrics, serviceName = 'llmflow') {
  return {
    resourceMetrics: [{
      resource: {
        attributes: [{ key: 'service.name', value: { stringValue: serviceName } }]
      },
      scopeMetrics: [{
        scope: { name: 'llmflow' },
        metrics: metrics.map(metricToOtlpMetric)
      }]
    }]
  };
}
288
+
289
+ /**
290
+ * Send data to OTLP endpoint
291
+ */
292
/**
 * POST a JSON payload to an OTLP/HTTP endpoint.
 *
 * Never rejects: connection errors, non-2xx responses, and timeouts all
 * resolve to `{ success: false, ... }` so callers can log and continue.
 * Uses the callback-style http/https modules adapted with a resolve-only
 * Promise (a legitimate use of `new Promise` for a callback API).
 *
 * @param {string} endpoint - full URL of the OTLP endpoint
 * @param {object} payload - JSON-serializable OTLP request body
 * @returns {Promise<{success: boolean, status?: number, error?: string}>}
 */
async function sendToEndpoint(endpoint, payload) {
  if (!endpoint) return { success: false, error: 'No endpoint configured' };

  return new Promise((resolve) => {
    const url = new URL(endpoint);
    const isHttps = url.protocol === 'https:';
    const httpModule = isHttps ? https : http;

    const postData = JSON.stringify(payload);

    const options = {
      hostname: url.hostname,
      port: url.port || (isHttps ? 443 : 80),
      path: url.pathname + url.search,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(postData),
        // User-configured headers (e.g. Authorization) win on key collisions.
        ...EXPORT_HEADERS
      }
    };

    const req = httpModule.request(options, (res) => {
      let data = '';
      res.on('data', chunk => data += chunk);
      res.on('end', () => {
        if (res.statusCode >= 200 && res.statusCode < 300) {
          resolve({ success: true, status: res.statusCode });
        } else {
          // Include the response body to aid debugging backend rejections.
          resolve({ success: false, status: res.statusCode, error: data });
        }
      });
    });

    req.on('error', (err) => {
      resolve({ success: false, error: err.message });
    });

    // 10s hard timeout; destroy() aborts the socket. A trailing 'error'
    // event may fire afterwards, but the second resolve() is a no-op.
    req.setTimeout(10000, () => {
      req.destroy();
      resolve({ success: false, error: 'Request timeout' });
    });

    req.write(postData);
    req.end();
  });
}
339
+
340
+ /**
341
+ * Export traces to external backend
342
+ */
343
/**
 * Export a batch of traces to the configured backend.
 * No-op (returns undefined) when no endpoint is configured or the batch is empty.
 *
 * @param {object[]} traces
 * @returns {Promise<object|undefined>} send result, or undefined when skipped
 */
async function exportTraces(traces) {
  const endpoint = EXPORT_ENDPOINTS.traces;
  if (!endpoint || traces.length === 0) return;

  const result = await sendToEndpoint(endpoint, buildTracesPayload(traces));

  if (!result.success) {
    log.error(`Failed to export traces: ${result.error}`);
  } else {
    log.debug(`Exported ${traces.length} traces to ${endpoint}`);
  }

  return result;
}
357
+
358
+ /**
359
+ * Export logs to external backend
360
+ */
361
/**
 * Export a batch of logs to the configured backend.
 * No-op (returns undefined) when no endpoint is configured or the batch is empty.
 *
 * @param {object[]} logs
 * @returns {Promise<object|undefined>} send result, or undefined when skipped
 */
async function exportLogs(logs) {
  const endpoint = EXPORT_ENDPOINTS.logs;
  if (!endpoint || logs.length === 0) return;

  const result = await sendToEndpoint(endpoint, buildLogsPayload(logs));

  if (!result.success) {
    log.error(`Failed to export logs: ${result.error}`);
  } else {
    log.debug(`Exported ${logs.length} logs to ${endpoint}`);
  }

  return result;
}
375
+
376
+ /**
377
+ * Export metrics to external backend
378
+ */
379
/**
 * Export a batch of metrics to the configured backend.
 * No-op (returns undefined) when no endpoint is configured or the batch is empty.
 *
 * @param {object[]} metrics
 * @returns {Promise<object|undefined>} send result, or undefined when skipped
 */
async function exportMetrics(metrics) {
  const endpoint = EXPORT_ENDPOINTS.metrics;
  if (!endpoint || metrics.length === 0) return;

  const result = await sendToEndpoint(endpoint, buildMetricsPayload(metrics));

  if (!result.success) {
    log.error(`Failed to export metrics: ${result.error}`);
  } else {
    log.debug(`Exported ${metrics.length} metrics to ${endpoint}`);
  }

  return result;
}
393
+
394
+ /**
395
+ * Queue a trace for batched export
396
+ */
397
/**
 * Queue a trace for batched export.
 * Silently drops the trace when export is disabled or unconfigured.
 */
function queueTrace(trace) {
  if (!EXPORT_ENABLED || !EXPORT_ENDPOINTS.traces) return;

  traceBatch.push(trace);

  // Flush immediately once the batch is full; otherwise rely on the timer.
  if (traceBatch.length < BATCH_SIZE) {
    scheduleFlush();
  } else {
    flushTraces();
  }
}
408
+
409
+ /**
410
+ * Queue a log for batched export
411
+ */
412
/**
 * Queue a log for batched export.
 * Silently drops the log when export is disabled or unconfigured.
 */
function queueLog(logEntry) {
  if (!EXPORT_ENABLED || !EXPORT_ENDPOINTS.logs) return;

  logBatch.push(logEntry);

  // Flush immediately once the batch is full; otherwise rely on the timer.
  if (logBatch.length < BATCH_SIZE) {
    scheduleFlush();
  } else {
    flushLogs();
  }
}
423
+
424
+ /**
425
+ * Queue a metric for batched export
426
+ */
427
/**
 * Queue a metric for batched export.
 * Silently drops the metric when export is disabled or unconfigured.
 */
function queueMetric(metric) {
  if (!EXPORT_ENABLED || !EXPORT_ENDPOINTS.metrics) return;

  metricBatch.push(metric);

  // Flush immediately once the batch is full; otherwise rely on the timer.
  if (metricBatch.length < BATCH_SIZE) {
    scheduleFlush();
  } else {
    flushMetrics();
  }
}
438
+
439
+ /**
440
+ * Flush traces batch
441
+ */
442
/**
 * Drain and export the pending trace batch.
 */
async function flushTraces() {
  if (traceBatch.length === 0) return;

  // Swap the batch out before awaiting so traces queued during the export
  // land in a fresh array instead of being exported twice or lost.
  const pending = traceBatch;
  traceBatch = [];

  await exportTraces(pending);
}
450
+
451
+ /**
452
+ * Flush logs batch
453
+ */
454
/**
 * Drain and export the pending log batch.
 */
async function flushLogs() {
  if (logBatch.length === 0) return;

  // Swap the batch out before awaiting so logs queued during the export
  // land in a fresh array instead of being exported twice or lost.
  const pending = logBatch;
  logBatch = [];

  await exportLogs(pending);
}
462
+
463
+ /**
464
+ * Flush metrics batch
465
+ */
466
/**
 * Drain and export the pending metric batch.
 */
async function flushMetrics() {
  if (metricBatch.length === 0) return;

  // Swap the batch out before awaiting so metrics queued during the export
  // land in a fresh array instead of being exported twice or lost.
  const pending = metricBatch;
  metricBatch = [];

  await exportMetrics(pending);
}
474
+
475
+ /**
476
+ * Flush all batches
477
+ */
478
/**
 * Flush every pending batch (traces, logs, metrics) concurrently.
 */
async function flushAll() {
  await Promise.all([flushTraces(), flushLogs(), flushMetrics()]);
}
485
+
486
+ /**
487
+ * Schedule periodic flush
488
+ */
489
/**
 * Arm the periodic flush timer if one is not already pending.
 *
 * Fixes:
 *  - the timer is unref'd so a scheduled flush cannot keep the Node process
 *    alive for up to FLUSH_INTERVAL_MS after all other work finishes;
 *  - flush failures are caught: a rejection from the async setTimeout
 *    callback would otherwise surface as an unhandled promise rejection
 *    (fatal on modern Node).
 */
function scheduleFlush() {
  if (flushTimer) return;

  flushTimer = setTimeout(async () => {
    flushTimer = null;
    try {
      await flushAll();
    } catch (err) {
      log.error(`OTLP flush failed: ${err.message}`);
    }
  }, FLUSH_INTERVAL_MS);

  // unref is absent in some environments (browsers, test doubles); guard it.
  flushTimer.unref?.();
}
497
+
498
+ /**
499
+ * Get export configuration
500
+ */
501
/**
 * Snapshot of the export configuration for diagnostics/UI.
 * @returns {object} enabled flag, endpoints, batching parameters, and
 *   per-signal availability booleans
 */
function getConfig() {
  const { traces, logs, metrics } = EXPORT_ENDPOINTS;
  return {
    enabled: EXPORT_ENABLED,
    endpoints: EXPORT_ENDPOINTS,
    batchSize: BATCH_SIZE,
    flushIntervalMs: FLUSH_INTERVAL_MS,
    hasTraces: Boolean(traces),
    hasLogs: Boolean(logs),
    hasMetrics: Boolean(metrics)
  };
}
512
+
513
+ /**
514
+ * Initialize export hooks
515
+ */
516
/**
 * Wire OTLP export into the database layer.
 *
 * Registers insert hooks (provided by `db`) so every stored trace/log/metric
 * is also queued for batched export. Hooks are only registered for signals
 * that have an endpoint configured; nothing is registered when export is
 * disabled.
 *
 * NOTE(review): the 'beforeExit' listener is async and Node does not await
 * it; the pending flush keeps the event loop alive until it settles, which
 * re-fires 'beforeExit' (a no-op once batches are empty). It does NOT run on
 * process.exit() or fatal signals — confirm that best-effort is acceptable.
 *
 * @param {object} db - database layer exposing setInsert*Hook registrars
 */
function initExportHooks(db) {
  if (!EXPORT_ENABLED) {
    log.info('OTLP export disabled');
    return;
  }

  log.info(`OTLP export enabled: traces=${!!EXPORT_ENDPOINTS.traces}, logs=${!!EXPORT_ENDPOINTS.logs}, metrics=${!!EXPORT_ENDPOINTS.metrics}`);

  if (EXPORT_ENDPOINTS.traces) {
    db.setInsertTraceHook((trace) => {
      queueTrace(trace);
    });
  }

  if (EXPORT_ENDPOINTS.logs) {
    db.setInsertLogHook((logEntry) => {
      queueLog(logEntry);
    });
  }

  if (EXPORT_ENDPOINTS.metrics) {
    db.setInsertMetricHook((metric) => {
      queueMetric(metric);
    });
  }

  // Best-effort final flush when the event loop drains.
  process.on('beforeExit', async () => {
    await flushAll();
  });
}
546
+
547
// Public API. queue*/flushAll/initExportHooks are the runtime entry points;
// the build*/…ToOtlp* converters are exported to enable unit testing.
module.exports = {
  exportTraces,
  exportLogs,
  exportMetrics,
  queueTrace,
  queueLog,
  queueMetric,
  flushAll,
  getConfig,
  initExportHooks,
  buildTracesPayload,
  buildLogsPayload,
  buildMetricsPayload,
  traceToOtlpSpan,
  logToOtlpRecord,
  metricToOtlpMetric,
  EXPORT_ENABLED
};