iii-sdk 0.0.2-alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1064 @@
1
+ const require_index = require('./index.cjs');
2
+ let __opentelemetry_api = require("@opentelemetry/api");
3
+ let ws = require("ws");
4
+ let __opentelemetry_api_logs = require("@opentelemetry/api-logs");
5
+ let __opentelemetry_resources = require("@opentelemetry/resources");
6
+ let __opentelemetry_semantic_conventions = require("@opentelemetry/semantic-conventions");
7
+ let crypto = require("crypto");
8
+ let __opentelemetry_sdk_trace_base = require("@opentelemetry/sdk-trace-base");
9
+ let __opentelemetry_sdk_metrics = require("@opentelemetry/sdk-metrics");
10
+ let __opentelemetry_core = require("@opentelemetry/core");
11
+ let __opentelemetry_sdk_trace_node = require("@opentelemetry/sdk-trace-node");
12
+ let __opentelemetry_instrumentation = require("@opentelemetry/instrumentation");
13
+ let __opentelemetry_sdk_logs = require("@opentelemetry/sdk-logs");
14
+ let __opentelemetry_otlp_transformer = require("@opentelemetry/otlp-transformer");
15
+ let node_perf_hooks = require("node:perf_hooks");
16
+
17
//#region src/iii-constants.ts
/**
 * Constants for the III module.
 */
/** Engine function paths for internal operations */
const EngineFunctions = {
  LIST_FUNCTIONS: "engine.functions.list",
  LIST_WORKERS: "engine.workers.list",
  REGISTER_WORKER: "engine.workers.register"
};
/** Engine trigger types */
const EngineTriggers = {
  FUNCTIONS_AVAILABLE: "engine::functions-available",
  LOG: "log"
};
/** Log function paths */
const LogFunctions = {
  INFO: "log.info",
  WARN: "log.warn",
  ERROR: "log.error",
  DEBUG: "log.debug"
};
/**
 * Default reconnection configuration: exponential backoff starting at 1s,
 * capped at 30s, with +/-30% jitter; maxRetries of -1 means retry forever.
 */
const DEFAULT_BRIDGE_RECONNECTION_CONFIG = {
  initialDelayMs: 1000,
  maxDelayMs: 30000,
  backoffMultiplier: 2,
  jitterFactor: 0.3,
  maxRetries: -1
};
/** Default invocation timeout in milliseconds (30s) */
const DEFAULT_INVOCATION_TIMEOUT_MS = 30000;
//#endregion
51
//#region src/telemetry-system/types.ts
// Resource attribute keys used when building the OTel Resource in initOtel().
const ATTR_SERVICE_VERSION = "service.version";
const ATTR_SERVICE_NAMESPACE = "service.namespace";
const ATTR_SERVICE_INSTANCE_ID = "service.instance.id";
/** Magic prefixes for binary frames over WebSocket */
const PREFIX_TRACES = "OTLP";
const PREFIX_METRICS = "MTRC";
const PREFIX_LOGS = "LOGS";
/**
 * Default reconnection configuration: exponential backoff from 1s up to 30s
 * with +/-30% jitter; maxRetries of -1 means retry forever.
 */
const DEFAULT_RECONNECTION_CONFIG = {
  initialDelayMs: 1000,
  maxDelayMs: 30000,
  backoffMultiplier: 2,
  jitterFactor: 0.3,
  maxRetries: -1
};
//#endregion
69
//#region src/telemetry-system/connection.ts
/**
 * Shared WebSocket connection for OpenTelemetry exporters.
 */
/**
 * Shared WebSocket connection for all OTEL exporters (traces, metrics, logs).
 * Uses a single connection with message prefixes to identify signal type.
 *
 * The connection is opened eagerly in the constructor and re-established with
 * jittered exponential backoff on close. Frames sent while disconnected are
 * queued (bounded by MAX_PENDING_MESSAGES) and replayed on reconnect.
 */
var SharedEngineConnection = class SharedEngineConnection {
  static {
    // Bound on the offline send queue; beyond this the oldest frame is
    // dropped and its callback receives an error (see send()).
    this.MAX_PENDING_MESSAGES = 1e3;
  }
  constructor(wsUrl, config = {}) {
    this.ws = null;
    this.connecting = false; // guards against overlapping connect() calls
    this.shuttingDown = false; // suppresses reconnect scheduling during shutdown()
    this.pendingMessages = []; // { frame, callback } entries queued while offline
    this.reconnectAttempt = 0; // consecutive failed attempts; drives the backoff exponent
    this.reconnectTimeout = null;
    this.state = "disconnected"; // "disconnected" | "connecting" | "connected" | "reconnecting" | "failed"
    this.onConnectedCallbacks = []; // fired on every successful (re)connect
    this.wsUrl = wsUrl;
    // Caller-supplied overrides win over DEFAULT_RECONNECTION_CONFIG.
    this.config = {
      ...DEFAULT_RECONNECTION_CONFIG,
      ...config
    };
    this.connect();
  }
  // Open the WebSocket unless a connection is already open or in progress.
  connect() {
    if (this.connecting || this.ws && this.ws.readyState === ws.WebSocket.OPEN) return;
    this.connecting = true;
    this.state = "connecting";
    try {
      this.ws = new ws.WebSocket(this.wsUrl);
      this.ws.on("open", () => {
        this.connecting = false;
        this.state = "connected";
        console.log(`[OTel] Connected to engine at ${this.wsUrl}`);
        if (this.reconnectAttempt > 0) console.log("[OTel] Successfully reconnected");
        this.reconnectAttempt = 0;
        if (this.reconnectTimeout) {
          clearTimeout(this.reconnectTimeout);
          this.reconnectTimeout = null;
        }
        // Flush everything queued while we were offline, then notify listeners.
        const pending = this.pendingMessages.splice(0, this.pendingMessages.length);
        for (const { frame, callback } of pending) this.ws?.send(frame, (err) => callback?.(err));
        for (const cb of this.onConnectedCallbacks) cb();
      });
      this.ws.on("close", () => {
        this.connecting = false;
        this.ws = null;
        if (this.shuttingDown) {
          this.state = "disconnected";
          console.log("[OTel] Connection closed during shutdown");
          return;
        }
        this.state = "disconnected";
        console.log("[OTel] Disconnected from engine, will reconnect...");
        this.scheduleReconnect();
      });
      this.ws.on("error", (err) => {
        // "error" is always followed by "close", which handles the reconnect.
        this.connecting = false;
        if (this.shuttingDown) return;
        console.error("[OTel] WebSocket error:", err.message);
      });
    } catch (err) {
      this.connecting = false;
      console.error("[OTel] Connection failed:", err);
      this.scheduleReconnect();
    }
  }
  // Schedule the next connect() with capped, jittered exponential backoff.
  scheduleReconnect() {
    if (this.config.maxRetries !== -1 && this.reconnectAttempt >= this.config.maxRetries) {
      this.state = "failed";
      console.error(`[OTel] Max retries (${this.config.maxRetries}) reached, giving up`);
      // Fail any queued frames so callers are not left waiting forever.
      const pending = this.pendingMessages.splice(0, this.pendingMessages.length);
      const failedError = /* @__PURE__ */ new Error("Connection failed after max retries");
      for (const { callback } of pending) callback?.(failedError);
      return;
    }
    if (this.reconnectTimeout) return;
    // delay = min(initial * multiplier^attempt, max) +/- jitterFactor.
    const exponentialDelay = this.config.initialDelayMs * this.config.backoffMultiplier ** this.reconnectAttempt;
    const cappedDelay = Math.min(exponentialDelay, this.config.maxDelayMs);
    const jitter = cappedDelay * this.config.jitterFactor * (2 * Math.random() - 1);
    const delay = Math.max(0, Math.floor(cappedDelay + jitter));
    this.state = "reconnecting";
    console.log(`[OTel] Reconnecting in ${delay}ms (attempt ${this.reconnectAttempt + 1})...`);
    this.reconnectTimeout = setTimeout(() => {
      this.reconnectTimeout = null;
      this.reconnectAttempt++;
      this.connect();
    }, delay);
  }
  /**
   * Send a message with a signal prefix.
   *
   * The frame is `prefix` bytes followed by the payload. When the socket is
   * not open, the frame is queued (dropping the oldest on overflow) and a
   * connect is kicked off; `callback` receives an Error on failure/drop.
   */
  send(prefix, data, callback) {
    const prefixBytes = Buffer.from(prefix, "utf-8");
    const frame = Buffer.concat([prefixBytes, Buffer.from(data)]);
    if (this.ws && this.ws.readyState === ws.WebSocket.OPEN) this.ws.send(frame, callback);
    else {
      if (this.pendingMessages.length >= SharedEngineConnection.MAX_PENDING_MESSAGES) {
        console.warn("[OTel] Pending message queue full, dropping oldest message");
        this.pendingMessages.shift()?.callback?.(/* @__PURE__ */ new Error("Message dropped due to queue overflow"));
      }
      this.pendingMessages.push({
        frame,
        callback
      });
      this.connect();
    }
  }
  /**
   * Register a callback to be called when connected.
   * Fires immediately if already connected, and again after every reconnect.
   */
  onConnected(callback) {
    this.onConnectedCallbacks.push(callback);
    if (this.state === "connected") callback();
  }
  /**
   * Get the current connection state.
   */
  getState() {
    return this.state;
  }
  /**
   * Shutdown the connection.
   * Cancels any pending reconnect, closes the socket, and fails all queued
   * frames; no further reconnects are attempted.
   */
  async shutdown() {
    this.shuttingDown = true;
    if (this.reconnectTimeout) {
      clearTimeout(this.reconnectTimeout);
      this.reconnectTimeout = null;
    }
    if (this.ws) {
      this.ws.close();
      this.ws = null;
    }
    const pending = this.pendingMessages.splice(0, this.pendingMessages.length);
    const shutdownError = /* @__PURE__ */ new Error("Connection shutdown before message could be sent");
    for (const { callback } of pending) callback?.(shutdownError);
    this.onConnectedCallbacks = [];
    this.state = "disconnected";
  }
};
//#endregion
216
//#region src/telemetry-system/span-exporter.ts
/**
 * Span exporter for the III Engine.
 */
/**
 * Span exporter using the shared WebSocket connection.
 *
 * Spans are serialized to OTLP JSON and framed with the PREFIX_TRACES
 * marker. While the connection is down, exports are buffered (bounded by
 * MAX_PENDING_EXPORTS) and replayed once the connection comes back up.
 */
var EngineSpanExporter = class EngineSpanExporter {
  static {
    // Bound on exports buffered while disconnected; oldest is dropped first.
    this.MAX_PENDING_EXPORTS = 100;
  }
  constructor(connection) {
    this.pendingExports = [];
    this.connection = connection;
    // Replay buffered exports whenever the shared connection (re)connects.
    this.connection.onConnected(() => this.flushPending());
  }
  // Drain the offline buffer and send every batch it contained.
  flushPending() {
    const queued = this.pendingExports.splice(0, this.pendingExports.length);
    queued.forEach(({ spans, resultCallback }) => this.sendExport(spans, resultCallback));
  }
  // Serialize one batch and transmit it, reporting the outcome via resultCallback.
  sendExport(spans, resultCallback) {
    try {
      const payload = __opentelemetry_otlp_transformer.JsonTraceSerializer.serializeRequest(spans);
      if (!payload) {
        // Nothing to send counts as success.
        resultCallback?.({ code: __opentelemetry_core.ExportResultCode.SUCCESS });
        return;
      }
      this.connection.send(PREFIX_TRACES, payload, (err) => {
        if (!err) {
          resultCallback?.({ code: __opentelemetry_core.ExportResultCode.SUCCESS });
          return;
        }
        console.error("[OTel] Failed to send spans:", err.message);
        resultCallback?.({
          code: __opentelemetry_core.ExportResultCode.FAILED,
          error: err
        });
      });
    } catch (err) {
      console.error("[OTel] Error exporting spans:", err);
      resultCallback?.({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: err
      });
    }
  }
  // Send immediately when connected; otherwise buffer (dropping the oldest
  // entry with a FAILED result when the buffer is full).
  doExport(spans, resultCallback) {
    if (this.connection.getState() === "connected") {
      this.sendExport(spans, resultCallback);
      return;
    }
    if (this.pendingExports.length >= EngineSpanExporter.MAX_PENDING_EXPORTS) {
      this.pendingExports.shift()?.resultCallback?.({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: new Error("Queue overflow")
      });
      console.warn("[OTel] Spans export queue full, dropped oldest entry");
    }
    this.pendingExports.push({
      spans,
      resultCallback
    });
  }
  // SpanExporter interface entry point.
  export(spans, resultCallback) {
    this.doExport(spans, resultCallback);
  }
  // Fail anything still buffered; nothing is flushed on shutdown.
  async shutdown() {
    const queued = this.pendingExports.splice(0, this.pendingExports.length);
    const reason = new Error("Exporter shutdown before export completed");
    for (const entry of queued) {
      entry.resultCallback?.({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: reason
      });
    }
  }
  async forceFlush() {}
};
//#endregion
292
//#region src/telemetry-system/metrics-exporter.ts
/**
 * Metrics exporter for the III Engine.
 */
/**
 * Metrics exporter using the shared WebSocket connection.
 *
 * Metric batches are serialized to OTLP JSON and framed with the
 * PREFIX_METRICS marker. While the connection is down, exports are buffered
 * (bounded by MAX_PENDING_EXPORTS) and replayed on reconnect.
 */
var EngineMetricsExporter = class EngineMetricsExporter {
  static {
    // Bound on exports buffered while disconnected; oldest is dropped first.
    this.MAX_PENDING_EXPORTS = 100;
  }
  constructor(connection) {
    this.pendingExports = [];
    this.connection = connection;
    // Replay buffered exports whenever the shared connection (re)connects.
    this.connection.onConnected(() => this.flushPending());
  }
  // Drain the offline buffer and send every batch it contained.
  flushPending() {
    const queued = this.pendingExports.splice(0, this.pendingExports.length);
    for (const entry of queued) this.sendExport(entry.metrics, entry.resultCallback);
  }
  // Serialize one batch and transmit it, reporting the outcome via resultCallback.
  sendExport(metricsData, resultCallback) {
    try {
      const payload = __opentelemetry_otlp_transformer.JsonMetricsSerializer.serializeRequest(metricsData);
      if (!payload) {
        // Nothing to send counts as success.
        resultCallback?.({ code: __opentelemetry_core.ExportResultCode.SUCCESS });
        return;
      }
      this.connection.send(PREFIX_METRICS, payload, (err) => {
        if (!err) {
          resultCallback?.({ code: __opentelemetry_core.ExportResultCode.SUCCESS });
          return;
        }
        console.error("[OTel] Failed to send metrics:", err.message);
        resultCallback?.({
          code: __opentelemetry_core.ExportResultCode.FAILED,
          error: err
        });
      });
    } catch (err) {
      console.error("[OTel] Error exporting metrics:", err);
      resultCallback?.({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: err
      });
    }
  }
  // Send immediately when connected; otherwise buffer (dropping the oldest
  // entry with a FAILED result when the buffer is full).
  doExport(metricsData, resultCallback) {
    if (this.connection.getState() === "connected") {
      this.sendExport(metricsData, resultCallback);
      return;
    }
    if (this.pendingExports.length >= EngineMetricsExporter.MAX_PENDING_EXPORTS) {
      this.pendingExports.shift()?.resultCallback?.({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: new Error("Queue overflow")
      });
      console.warn("[OTel] Metrics export queue full, dropped oldest entry");
    }
    this.pendingExports.push({
      metrics: metricsData,
      resultCallback
    });
  }
  // PushMetricExporter interface entry point.
  export(metricsData, resultCallback) {
    this.doExport(metricsData, resultCallback);
  }
  // Fail anything still buffered; nothing is flushed on shutdown.
  async shutdown() {
    const queued = this.pendingExports.splice(0, this.pendingExports.length);
    const reason = new Error("Exporter shutdown before export completed");
    for (const entry of queued) {
      entry.resultCallback?.({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: reason
      });
    }
  }
  async forceFlush() {}
};
//#endregion
368
//#region src/telemetry-system/log-exporter.ts
/**
 * Log exporter for the III Engine.
 */
/**
 * Log exporter using the shared WebSocket connection.
 *
 * While the connection is down, exports are buffered up to
 * MAX_PENDING_EXPORTS entries (matching EngineSpanExporter and
 * EngineMetricsExporter) and replayed on reconnect. When the buffer is full
 * the oldest entry is dropped with a FAILED result, so a prolonged outage
 * cannot grow the queue without bound.
 */
var EngineLogExporter = class EngineLogExporter {
  static {
    // Cap on buffered exports while disconnected; consistent with the
    // span and metrics exporters.
    this.MAX_PENDING_EXPORTS = 100;
  }
  constructor(connection) {
    this.pendingExports = [];
    this.connection = connection;
    // Replay buffered exports whenever the shared connection (re)connects.
    this.connection.onConnected(() => this.flushPending());
  }
  // Drain the offline buffer and re-attempt every export it contained.
  flushPending() {
    const pending = this.pendingExports.splice(0, this.pendingExports.length);
    for (const { logs, callback } of pending) this.doExport(logs, callback);
  }
  // Buffer when offline (bounded, drop-oldest); otherwise serialize and send.
  doExport(logs, resultCallback) {
    if (this.connection.getState() !== "connected") {
      if (this.pendingExports.length >= EngineLogExporter.MAX_PENDING_EXPORTS) {
        // Drop the oldest buffered export and fail its callback, mirroring
        // the overflow behavior of the span/metrics exporters.
        this.pendingExports.shift()?.callback?.({
          code: __opentelemetry_core.ExportResultCode.FAILED,
          error: new Error("Queue overflow")
        });
        console.warn("[OTel] Logs export queue full, dropped oldest entry");
      }
      this.pendingExports.push({
        logs,
        callback: resultCallback
      });
      return;
    }
    try {
      const serialized = __opentelemetry_otlp_transformer.JsonLogsSerializer.serializeRequest(logs);
      if (!serialized) {
        // Nothing to send counts as success.
        resultCallback({ code: __opentelemetry_core.ExportResultCode.SUCCESS });
        return;
      }
      this.connection.send(PREFIX_LOGS, serialized, (err) => {
        if (err) {
          console.error("[OTel] Failed to send logs:", err.message);
          resultCallback({
            code: __opentelemetry_core.ExportResultCode.FAILED,
            error: err
          });
        } else resultCallback({ code: __opentelemetry_core.ExportResultCode.SUCCESS });
      });
    } catch (err) {
      console.error("[OTel] Error exporting logs:", err);
      resultCallback({
        code: __opentelemetry_core.ExportResultCode.FAILED,
        error: err
      });
    }
  }
  // LogRecordExporter interface entry point.
  export(logs, resultCallback) {
    this.doExport(logs, resultCallback);
  }
  // Fail anything still buffered; nothing is flushed on shutdown.
  async shutdown() {
    for (const { callback } of this.pendingExports) callback({
      code: __opentelemetry_core.ExportResultCode.FAILED,
      error: new Error("Exporter shutdown")
    });
    this.pendingExports = [];
  }
};
//#endregion
429
//#region src/telemetry-system/context.ts
/**
 * Trace context and baggage propagation utilities.
 */
/**
 * Extract the current trace ID from the active span context.
 * Returns undefined when no span is active or the ID is the all-zero sentinel.
 */
function currentTraceId() {
  const sc = __opentelemetry_api.trace.getActiveSpan()?.spanContext();
  if (sc && sc.traceId && sc.traceId !== "00000000000000000000000000000000") return sc.traceId;
}
/**
 * Extract the current span ID from the active span context.
 * Returns undefined when no span is active or the ID is the all-zero sentinel.
 */
function currentSpanId() {
  const sc = __opentelemetry_api.trace.getActiveSpan()?.spanContext();
  if (sc && sc.spanId && sc.spanId !== "0000000000000000") return sc.spanId;
}
/**
 * Inject the current trace context into a W3C traceparent header string.
 */
function injectTraceparent() {
  const headers = {};
  __opentelemetry_api.propagation.inject(__opentelemetry_api.context.active(), headers);
  return headers.traceparent;
}
/**
 * Extract a trace context from a W3C traceparent header string.
 */
function extractTraceparent(traceparent) {
  return __opentelemetry_api.propagation.extract(__opentelemetry_api.context.active(), { traceparent });
}
/**
 * Inject the current baggage into a W3C baggage header string.
 */
function injectBaggage() {
  const headers = {};
  __opentelemetry_api.propagation.inject(__opentelemetry_api.context.active(), headers);
  return headers.baggage;
}
/**
 * Extract baggage from a W3C baggage header string.
 */
function extractBaggage(baggage) {
  return __opentelemetry_api.propagation.extract(__opentelemetry_api.context.active(), { baggage });
}
/**
 * Extract both trace context and baggage from their respective headers.
 * Absent headers are simply omitted from the carrier.
 */
function extractContext(traceparent, baggage) {
  const headers = {};
  if (traceparent) headers.traceparent = traceparent;
  if (baggage) headers.baggage = baggage;
  return __opentelemetry_api.propagation.extract(__opentelemetry_api.context.active(), headers);
}
/**
 * Get a baggage entry from the current context.
 */
function getBaggageEntry(key) {
  const bag = __opentelemetry_api.propagation.getBaggage(__opentelemetry_api.context.active());
  return bag?.getEntry(key)?.value;
}
/**
 * Set a baggage entry in the current context.
 * Returns a new context carrying the updated baggage.
 */
function setBaggageEntry(key, value) {
  const active = __opentelemetry_api.context.active();
  const current = __opentelemetry_api.propagation.getBaggage(active) ?? __opentelemetry_api.propagation.createBaggage();
  return __opentelemetry_api.propagation.setBaggage(active, current.setEntry(key, { value }));
}
/**
 * Remove a baggage entry from the current context.
 * Returns the active context unchanged when no baggage is present.
 */
function removeBaggageEntry(key) {
  const active = __opentelemetry_api.context.active();
  const bag = __opentelemetry_api.propagation.getBaggage(active);
  return bag ? __opentelemetry_api.propagation.setBaggage(active, bag.removeEntry(key)) : active;
}
/**
 * Get all baggage entries from the current context as a plain key/value map.
 */
function getAllBaggage() {
  const bag = __opentelemetry_api.propagation.getBaggage(__opentelemetry_api.context.active());
  if (!bag) return {};
  return Object.fromEntries(bag.getAllEntries().map(([key, entry]) => [key, entry.value]));
}
//#endregion
527
//#region src/telemetry-system/index.ts
/**
 * OpenTelemetry initialization for the III Node SDK.
 *
 * This module provides trace, metrics, and log export to the III Engine
 * via a shared WebSocket connection using OTLP JSON format.
 */
// Module-level singletons: populated by initOtel(), cleared by shutdownOtel().
let sharedConnection = null; // SharedEngineConnection carrying all three signals
let tracerProvider = null;
let meterProvider = null; // only set when metrics are enabled in initOtel()
let loggerProvider = null;
let tracer = null; // returned by getTracer(); null until initOtel() runs
let meter = null; // returned by getMeter(); null when metrics are disabled
let logger = null; // returned by getLogger()
let serviceName = "iii-node-iii"; // default until initOtel() resolves config/env
542
/**
 * Initialize OpenTelemetry with the given configuration.
 * This should be called once at application startup.
 *
 * Resolution order for every setting is: explicit `config` value, then the
 * corresponding environment variable, then a built-in default. Traces and
 * logs are always set up when OTel is enabled; metrics are opt-in via
 * `config.metricsEnabled` / OTEL_METRICS_ENABLED.
 */
function initOtel(config = {}) {
	// Enabled via config.enabled, or OTEL_ENABLED=true/1 in the environment.
	if (!(config.enabled ?? (process.env.OTEL_ENABLED === "true" || process.env.OTEL_ENABLED === "1"))) {
		console.log("[OTel] OpenTelemetry is disabled");
		return;
	}
	// Register any caller-supplied instrumentations before providers exist.
	if (config.instrumentations?.length) (0, __opentelemetry_instrumentation.registerInstrumentations)({ instrumentations: config.instrumentations });
	serviceName = config.serviceName ?? process.env.OTEL_SERVICE_NAME ?? "iii-node-iii";
	const serviceVersion = config.serviceVersion ?? process.env.SERVICE_VERSION ?? "unknown";
	const serviceNamespace = config.serviceNamespace ?? process.env.SERVICE_NAMESPACE;
	// A random UUID instance id is generated when none is configured.
	const serviceInstanceId = config.serviceInstanceId ?? process.env.SERVICE_INSTANCE_ID ?? (0, crypto.randomUUID)();
	const engineWsUrl = config.engineWsUrl ?? process.env.III_BRIDGE_URL ?? "ws://localhost:49134";
	const resourceAttributes = {
		[__opentelemetry_semantic_conventions.ATTR_SERVICE_NAME]: serviceName,
		[ATTR_SERVICE_VERSION]: serviceVersion,
		[ATTR_SERVICE_INSTANCE_ID]: serviceInstanceId
	};
	// Namespace is optional and only attached when present.
	if (serviceNamespace) resourceAttributes[ATTR_SERVICE_NAMESPACE] = serviceNamespace;
	const resource = new __opentelemetry_resources.Resource(resourceAttributes);
	// One WebSocket shared by the trace, metrics, and log exporters.
	sharedConnection = new SharedEngineConnection(engineWsUrl, config.reconnectionConfig);
	tracerProvider = new __opentelemetry_sdk_trace_node.NodeTracerProvider({
		resource,
		spanProcessors: [new __opentelemetry_sdk_trace_base.SimpleSpanProcessor(new EngineSpanExporter(sharedConnection))]
	});
	// W3C tracecontext + baggage propagation across process boundaries.
	__opentelemetry_api.propagation.setGlobalPropagator(new __opentelemetry_core.CompositePropagator({ propagators: [new __opentelemetry_core.W3CTraceContextPropagator(), new __opentelemetry_core.W3CBaggagePropagator()] }));
	tracerProvider.register();
	tracer = __opentelemetry_api.trace.getTracer(serviceName);
	console.log(`[OTel] Traces initialized: engine=${engineWsUrl}, service=${serviceName}`);
	if (config.metricsEnabled ?? (process.env.OTEL_METRICS_ENABLED === "true" || process.env.OTEL_METRICS_ENABLED === "1")) {
		const metricsExporter = new EngineMetricsExporter(sharedConnection);
		// Default export interval: 60 seconds.
		const exportIntervalMs = config.metricsExportIntervalMs ?? 6e4;
		meterProvider = new __opentelemetry_sdk_metrics.MeterProvider({
			resource,
			readers: [new __opentelemetry_sdk_metrics.PeriodicExportingMetricReader({
				exporter: metricsExporter,
				exportIntervalMillis: exportIntervalMs
			})]
		});
		__opentelemetry_api.metrics.setGlobalMeterProvider(meterProvider);
		meter = meterProvider.getMeter(serviceName);
		console.log(`[OTel] Metrics initialized: interval=${exportIntervalMs}ms`);
	}
	// Logs are always enabled when OTel itself is enabled.
	const logExporter = new EngineLogExporter(sharedConnection);
	loggerProvider = new __opentelemetry_sdk_logs.LoggerProvider({ resource });
	loggerProvider.addLogRecordProcessor(new __opentelemetry_sdk_logs.SimpleLogRecordProcessor(logExporter));
	logger = loggerProvider.getLogger(serviceName);
	console.log("[OTel] Logs initialized");
}
593
/**
 * Shutdown OpenTelemetry, flushing any pending data.
 *
 * Providers are shut down before the shared connection so their final
 * exports can still go through it; afterwards every accessor returns null.
 */
async function shutdownOtel() {
  await tracerProvider?.shutdown();
  tracerProvider = null;
  await meterProvider?.shutdown();
  meterProvider = null;
  await loggerProvider?.shutdown();
  loggerProvider = null;
  // The connection goes down last, after all providers have flushed.
  await sharedConnection?.shutdown();
  sharedConnection = null;
  tracer = null;
  meter = null;
  logger = null;
}
617
/**
 * Get the OpenTelemetry tracer instance.
 * Null until initOtel() has run, and again after shutdownOtel().
 */
function getTracer() {
  return tracer;
}
/**
 * Get the OpenTelemetry meter instance.
 * Null unless metrics were enabled during initOtel().
 */
function getMeter() {
  return meter;
}
/**
 * Get the OpenTelemetry logger instance.
 * Null until initOtel() has run, and again after shutdownOtel().
 */
function getLogger() {
  return logger;
}
635
/**
 * Start a new span with the given name and run the callback within it.
 *
 * When telemetry is not initialized (no tracer), the callback still runs but
 * receives an inert no-op span. Otherwise the span becomes active for the
 * duration of the callback: it is marked OK on success, or marked ERROR with
 * the exception recorded on failure (the error is rethrown), and always ended.
 */
async function withSpan(name, options, fn) {
  if (!tracer) return fn(makeNoopSpan());
  // Parent either from an explicit W3C traceparent header or the active context.
  const parentContext = options.traceparent ? extractTraceparent(options.traceparent) : __opentelemetry_api.context.active();
  const spanOptions = { kind: options.kind ?? __opentelemetry_api.SpanKind.INTERNAL };
  return tracer.startActiveSpan(name, spanOptions, parentContext, async (span) => {
    try {
      const result = await fn(span);
      span.setStatus({ code: __opentelemetry_api.SpanStatusCode.OK });
      return result;
    } catch (error) {
      span.setStatus({
        code: __opentelemetry_api.SpanStatusCode.ERROR,
        message: error.message
      });
      span.recordException(error);
      throw error;
    } finally {
      span.end();
    }
  });
}
/** Build an inert Span-shaped object used when telemetry is not initialized. */
function makeNoopSpan() {
  const span = {
    spanContext: () => ({
      traceId: "",
      spanId: "",
      traceFlags: 0
    }),
    end: () => {},
    isRecording: () => false,
    recordException: () => {}
  };
  // Chainable members all return the span itself, mirroring the Span API.
  span.setAttribute = () => span;
  span.setAttributes = () => span;
  span.addEvent = () => span;
  span.addLink = () => span;
  span.addLinks = () => span;
  span.setStatus = () => span;
  span.updateName = () => span;
  return span;
}
//#endregion
679
//#region src/utils.ts
/**
 * Safely stringify a value, handling circular references, BigInt, and other
 * edge cases. Returns "[unserializable]" if serialization fails.
 *
 * Cycle detection tracks only the current ancestor chain rather than every
 * object ever seen, so an object that legitimately appears more than once
 * (shared, non-circular references) is serialized normally; only true cycles
 * are replaced with "[Circular]". BigInt values become decimal strings.
 */
function safeStringify(value) {
  // Stack of objects on the path from the root down to the value being visited.
  const ancestors = [];
  try {
    // Must be a `function` (not an arrow): JSON.stringify binds `this` to the
    // holder object, which tells us where in the tree we currently are.
    return JSON.stringify(value, function (_key, val) {
      if (typeof val === "bigint") return val.toString();
      if (val !== null && typeof val === "object") {
        // Unwind ancestors we have already left; `this` is val's parent.
        while (ancestors.length > 0 && ancestors[ancestors.length - 1] !== this) ancestors.pop();
        if (ancestors.includes(val)) return "[Circular]";
        ancestors.push(val);
      }
      return val;
    });
  } catch {
    return "[unserializable]";
  }
}
//#endregion
701
//#region src/worker-metrics.ts
/**
 * Worker metrics collection for the III Node SDK.
 *
 * Collects CPU, memory, and event loop metrics for worker health monitoring.
 * Uses the Node.js built-in `monitorEventLoopDelay` API for accurate
 * event loop lag measurements.
 */
/**
 * Collects worker resource metrics including CPU, memory, and event loop lag.
 *
 * Uses the Node.js `monitorEventLoopDelay` API for high-precision event loop
 * delay measurements instead of manual `setImmediate` timing.
 *
 * @example
 * ```typescript
 * const collector = new WorkerMetricsCollector()
 *
 * // Collect metrics periodically
 * setInterval(() => {
 *   const metrics = collector.collect()
 *   console.log('CPU:', metrics.cpu_percent, '%')
 *   console.log('Event Loop Lag:', metrics.event_loop_lag_ms, 'ms')
 * }, 5000)
 *
 * // Clean up when done
 * collector.stopMonitoring()
 * ```
 */
var WorkerMetricsCollector = class {
  /**
   * Creates a new WorkerMetricsCollector instance.
   *
   * @param options - Configuration options
   * @param options.eventLoopResolutionMs - Event-loop histogram sampling
   *   resolution in milliseconds (default 20).
   */
  constructor(options = {}) {
    this.eventLoopHistogram = null;
    this.startTime = Date.now();
    // Baselines for the per-interval CPU delta computed in collect().
    this.lastCpuUsage = process.cpuUsage();
    this.lastCpuTime = node_perf_hooks.performance.now();
    this.startEventLoopMonitoring(options.eventLoopResolutionMs ?? 20);
  }
  /**
   * Starts the event loop delay histogram monitoring.
   *
   * @param resolutionMs - Histogram resolution in milliseconds; invalid or
   *   non-positive values fall back to 20ms.
   */
  startEventLoopMonitoring(resolutionMs) {
    this.eventLoopHistogram = (0, node_perf_hooks.monitorEventLoopDelay)({ resolution: Number.isFinite(resolutionMs) && resolutionMs > 0 ? Math.max(1, Math.floor(resolutionMs)) : 20 });
    this.eventLoopHistogram.enable();
  }
  /**
   * Stops the event loop monitoring and releases resources.
   * Should be called when the collector is no longer needed.
   */
  stopMonitoring() {
    if (this.eventLoopHistogram) {
      this.eventLoopHistogram.disable();
      this.eventLoopHistogram = null;
    }
  }
  /**
   * Collects current worker metrics.
   *
   * This method calculates CPU usage since the last collection,
   * reads memory usage, and gets event loop delay statistics.
   * The event loop histogram is reset after each collection for
   * accurate per-interval measurements.
   *
   * @returns Current worker metrics snapshot
   */
  collect() {
    const memoryUsage = process.memoryUsage();
    const cpuUsage = process.cpuUsage();
    const now = node_perf_hooks.performance.now();
    const cpuDelta = {
      user: cpuUsage.user - this.lastCpuUsage.user,
      system: cpuUsage.system - this.lastCpuUsage.system
    };
    // performance.now() is in ms; cpuUsage() is in microseconds, so scale.
    const timeDelta = (now - this.lastCpuTime) * 1e3;
    const cpuPercent = timeDelta > 0 ? (cpuDelta.user + cpuDelta.system) / timeDelta * 100 : 0;
    this.lastCpuUsage = cpuUsage;
    this.lastCpuTime = now;
    let eventLoopLagMs = 0;
    if (this.eventLoopHistogram) {
      // histogram.mean (nanoseconds) is NaN until at least one sample has
      // been recorded; report 0 instead of propagating NaN into the payload.
      const meanNs = this.eventLoopHistogram.mean;
      if (Number.isFinite(meanNs)) eventLoopLagMs = meanNs / 1e6;
      this.eventLoopHistogram.reset();
    }
    return {
      memory_heap_used: memoryUsage.heapUsed,
      memory_heap_total: memoryUsage.heapTotal,
      memory_rss: memoryUsage.rss,
      memory_external: memoryUsage.external,
      cpu_user_micros: cpuUsage.user,
      cpu_system_micros: cpuUsage.system,
      cpu_percent: Math.min(cpuPercent, 100),
      event_loop_lag_ms: eventLoopLagMs,
      uptime_seconds: Math.floor((Date.now() - this.startTime) / 1e3),
      timestamp_ms: Date.now(),
      runtime: "node"
    };
  }
};
//#endregion
806
//#region src/otel-worker-gauges.ts
// Module-level registration state: registerWorkerGauges() is a no-op when
// registeredGauges is already set, so gauges are registered at most once.
let registeredGauges = false;
// Collector that owns the event-loop-delay histogram backing the gauges.
let metricsCollector = null;
// Handles captured at registration time — NOTE(review): presumably retained
// so a later cleanup path can unregister the batch callback; confirm against
// the rest of this module.
let registeredMeter = null;
let registeredBatchCallback = null;
let registeredObservables = [];
812
/**
 * Registers observable gauges reporting worker memory/CPU/event-loop/uptime
 * metrics on the given meter. Idempotent: a second call is a no-op until
 * stopWorkerGauges() has been invoked.
 *
 * @param {import("@opentelemetry/api").Meter} meter$1 - Meter to create gauges on.
 * @param {{ workerId: string, workerName?: string }} options - Attributes
 *   attached to every observation ("worker.id", optional "worker.name").
 */
function registerWorkerGauges(meter$1, options) {
	if (registeredGauges) return;
	const { workerId, workerName } = options;
	const baseAttributes = {
		"worker.id": workerId,
		...workerName && { "worker.name": workerName }
	};
	metricsCollector = new WorkerMetricsCollector();
	// Single source of truth mapping each collector snapshot key to its gauge.
	// Previously the gauge creation, the observe dispatch, and the observables
	// list were each written out by hand (the list twice), which could drift.
	const gaugeDefs = [
		["memory_heap_used", "iii.worker.memory.heap_used", "Worker heap memory used in bytes", "bytes"],
		["memory_heap_total", "iii.worker.memory.heap_total", "Worker total heap memory in bytes", "bytes"],
		["memory_rss", "iii.worker.memory.rss", "Worker resident set size in bytes", "bytes"],
		["memory_external", "iii.worker.memory.external", "Worker external memory in bytes", "bytes"],
		["cpu_percent", "iii.worker.cpu.percent", "Worker CPU usage percentage", "%"],
		["cpu_user_micros", "iii.worker.cpu.user_micros", "Worker CPU user time in microseconds", "us"],
		["cpu_system_micros", "iii.worker.cpu.system_micros", "Worker CPU system time in microseconds", "us"],
		["event_loop_lag_ms", "iii.worker.event_loop.lag_ms", "Worker event loop lag in milliseconds", "ms"],
		["uptime_seconds", "iii.worker.uptime_seconds", "Worker uptime in seconds", "s"]
	];
	const gauges = gaugeDefs.map(([metricKey, name, description, unit]) => ({
		metricKey,
		observable: meter$1.createObservableGauge(name, { description, unit })
	}));
	const observables = gauges.map((gauge) => gauge.observable);
	// One batch callback collects a single snapshot and fans it out to every
	// gauge whose value is present in the snapshot.
	const batchCallback = (observableResult) => {
		if (!metricsCollector) return;
		const snapshot = metricsCollector.collect();
		for (const { metricKey, observable } of gauges) {
			const value = snapshot[metricKey];
			if (value !== void 0) observableResult.observe(observable, value, baseAttributes);
		}
	};
	meter$1.addBatchObservableCallback(batchCallback, observables);
	// Retain everything stopWorkerGauges() needs to undo this registration.
	registeredMeter = meter$1;
	registeredBatchCallback = batchCallback;
	registeredObservables = observables;
	registeredGauges = true;
}
895
/**
 * Undoes registerWorkerGauges(): detaches the batch observable callback from
 * the meter, stops the metrics collector, and resets all registration state
 * so that registerWorkerGauges() may be called again. Safe to call when
 * nothing is registered.
 */
function stopWorkerGauges() {
	// Detach the callback first so the SDK stops invoking it mid-teardown.
	if (registeredMeter && registeredBatchCallback) {
		registeredMeter.removeBatchObservableCallback(registeredBatchCallback, registeredObservables);
	}
	const collector = metricsCollector;
	if (collector) {
		collector.stopMonitoring();
		metricsCollector = null;
	}
	// Reset every piece of registration state back to its initial value.
	registeredMeter = null;
	registeredBatchCallback = null;
	registeredObservables = [];
	registeredGauges = false;
}

//#endregion
// CommonJS export glue: each public name is exposed through an enumerable
// getter so the binding is resolved lazily at access time, mirroring
// live ESM bindings. Descriptors are identical to individual
// Object.defineProperty calls (enumerable: true, getter only).
const exportGetters = {
	DEFAULT_BRIDGE_RECONNECTION_CONFIG: () => DEFAULT_BRIDGE_RECONNECTION_CONFIG,
	DEFAULT_INVOCATION_TIMEOUT_MS: () => DEFAULT_INVOCATION_TIMEOUT_MS,
	EngineFunctions: () => EngineFunctions,
	EngineTriggers: () => EngineTriggers,
	LogFunctions: () => LogFunctions,
	WorkerMetricsCollector: () => WorkerMetricsCollector,
	currentSpanId: () => currentSpanId,
	currentTraceId: () => currentTraceId,
	extractBaggage: () => extractBaggage,
	extractContext: () => extractContext,
	extractTraceparent: () => extractTraceparent,
	getAllBaggage: () => getAllBaggage,
	getBaggageEntry: () => getBaggageEntry,
	getLogger: () => getLogger,
	getMeter: () => getMeter,
	getTracer: () => getTracer,
	initOtel: () => initOtel,
	injectBaggage: () => injectBaggage,
	injectTraceparent: () => injectTraceparent,
	registerWorkerGauges: () => registerWorkerGauges,
	removeBaggageEntry: () => removeBaggageEntry,
	safeStringify: () => safeStringify,
	setBaggageEntry: () => setBaggageEntry,
	shutdownOtel: () => shutdownOtel,
	stopWorkerGauges: () => stopWorkerGauges,
	withSpan: () => withSpan
};
for (const [exportName, get] of Object.entries(exportGetters)) {
	Object.defineProperty(exports, exportName, {
		enumerable: true,
		get
	});
}
//# sourceMappingURL=otel-worker-gauges-bAp_yKcU.cjs.map