@m4trix/core 0.9.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -1,26 +1,9 @@
  'use strict';

- var react = require('react');
- var socket_ioClient = require('socket.io-client');
- var lit = require('lit');
- var decorators_js = require('lit/decorators.js');
- var ref_js = require('lit/directives/ref.js');
- var animejs = require('animejs');
  var messages = require('@langchain/core/messages');
  var effect = require('effect');
  var crypto$1 = require('crypto');

- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __decorateClass = (decorators, target, key, kind) => {
-   var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
-   for (var i = decorators.length - 1, decorator; i >= 0; i--)
-     if (decorator = decorators[i])
-       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-   if (kind && result)
-     __defProp(target, key, result);
-   return result;
- };
  var __accessCheck = (obj, member, msg) => {
    if (!member.has(obj))
      throw TypeError("Cannot " + msg);
@@ -40,1608 +23,6 @@ var __privateSet = (obj, member, value, setter) => {
    return value;
  };

- // src/utility/Logger.ts
- var _Logger = class _Logger {
-   constructor(namespace = "") {
-     this.namespace = namespace;
-   }
-   static enableGlobalLogging() {
-     _Logger.globalEnabled = true;
-   }
-   static disableGlobalLogging() {
-     _Logger.globalEnabled = false;
-   }
-   formatPrefix() {
-     return this.namespace ? `[${this.namespace}]` : "";
-   }
-   logIfEnabled(level, ...args) {
-     if (!_Logger.globalEnabled)
-       return;
-     const prefix = this.formatPrefix();
-     if (prefix) {
-       console[level](prefix, ...args);
-     } else {
-       console[level](...args);
-     }
-   }
-   log(...args) {
-     this.logIfEnabled("log", ...args);
-   }
-   debug(...args) {
-     this.logIfEnabled("debug", ...args);
-   }
-   info(...args) {
-     this.logIfEnabled("info", ...args);
-   }
-   warn(...args) {
-     this.logIfEnabled("warn", ...args);
-   }
-   error(...args) {
-     this.logIfEnabled("error", ...args);
-   }
- };
- _Logger.globalEnabled = false;
- var Logger = _Logger;
-
- // src/react/adapter/VoiceEndpointAdapter.ts
- var VoiceEndpointAdapter = class {
-   constructor(config) {
-     this.logger = new Logger("SuTr > EndpointAdapter");
-     this.config = config;
-   }
- };
- var BaseVoiceEndpointAdapter = class extends VoiceEndpointAdapter {
-   constructor(config) {
-     super(config);
-   }
-   /**
-    * Send a voice file to the API endpoint and return a Pump stream of audio chunks
-    */
-   async sendVoiceFile({
-     blob,
-     metadata
-   }) {
-     const formData = new FormData();
-     formData.append("audio", blob);
-     if (metadata) {
-       formData.append("metadata", JSON.stringify(metadata));
-     }
-     this.logger.debug("Sending voice file to", this.config.endpoint, formData);
-     const response = await fetch(
-       `${this.config.baseUrl || ""}${this.config.endpoint}`,
-       {
-         method: "POST",
-         headers: this.config.headers,
-         body: formData
-       }
-     );
-     if (!response.ok) {
-       throw new Error(`API error: ${response.status} ${await response.text()}`);
-     }
-     if (!response.body) {
-       throw new Error("No response body");
-     }
-     return response;
-   }
- };
-
- // src/react/utility/audio/InputAudioController.ts
- var InputAudioController = class {
-   constructor() {
-     this.logger = new Logger("@m4trix/core > InputAudioController");
-   }
- };
-
- // src/react/utility/audio/WebAudioInputAudioController.ts
- var DEFAULT_SLICING_INTERVAL = 3e3;
- var WebAudioInputAudioController = class extends InputAudioController {
-   constructor(audioConfig = {}) {
-     super();
-     this.audioConfig = audioConfig;
-     // ─── Recording state ─────────────────────────────────────────────────────
-     this.audioContextState = {
-       context: null,
-       source: null,
-       analyser: null
-     };
-     this.mediaRecorder = null;
-     this.recordedChunks = [];
-     this.recordingStream = null;
-   }
-   get audioContext() {
-     return this.audioContextState.context;
-   }
-   async createAudioContext() {
-     const context = new AudioContext({
-       sampleRate: this.audioConfig.sampleRate || 16e3,
-       latencyHint: "interactive"
-     });
-     const analyser = context.createAnalyser();
-     analyser.fftSize = 2048;
-     return { context, source: null, analyser };
-   }
-   async cleanupAudioContext() {
-     this.logger.debug("Cleaning up audio context");
-     const { source, context } = this.audioContextState;
-     if (source)
-       source.disconnect();
-     if (context)
-       await context.close();
-     this.audioContextState = { context: null, source: null, analyser: null };
-   }
-   async startRecording({
-     onRecordedChunk,
-     onError
-   } = {}) {
-     try {
-       this.logger.debug("Starting recording");
-       this.recordedChunks = [];
-       const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-       this.recordingStream = stream;
-       if (!this.audioContextState.context) {
-         this.audioContextState = await this.createAudioContext();
-       }
-       this.mediaRecorder = new MediaRecorder(stream, {
-         mimeType: "audio/webm;codecs=opus"
-       });
-       this.mediaRecorder.ondataavailable = (e) => {
-         if (e.data.size > 0) {
-           this.recordedChunks.push(e.data);
-           onRecordedChunk?.(e.data);
-           this.logger.debug("Recorded chunk", e.data.size);
-         }
-       };
-       this.mediaRecorder.start(DEFAULT_SLICING_INTERVAL);
-       this.logger.debug("MediaRecorder started");
-     } catch (err) {
-       const error = err instanceof Error ? err : new Error("Failed to start recording");
-       this.logger.error(error);
-       onError?.(error);
-     }
-   }
-   async stopRecording({
-     onRecordingCompleted
-   } = {}) {
-     this.logger.debug("Stopping recording");
-     if (!this.mediaRecorder || this.mediaRecorder.state === "inactive")
-       return;
-     await new Promise((resolve) => {
-       this.mediaRecorder.onstop = async () => {
-         if (this.recordedChunks.length) {
-           const blob = new Blob(this.recordedChunks, { type: "audio/webm" });
-           onRecordingCompleted?.(blob);
-           this.logger.debug("Recording completed", blob.size);
-         }
-         this.recordingStream?.getTracks().forEach((t) => t.stop());
-         this.recordingStream = null;
-         await this.cleanupAudioContext();
-         resolve();
-       };
-       this.mediaRecorder.stop();
-     });
-   }
-   /**
-    * Cleans up all audio recording resources.
-    */
-   cleanup() {
-     this.cleanupAudioContext();
-     if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
-       this.mediaRecorder.stop();
-     }
-     if (this.recordingStream) {
-       this.recordingStream.getTracks().forEach((t) => t.stop());
-       this.recordingStream = null;
-     }
-   }
- };
-
- // src/react/utility/audio/OutputAudioController.ts
- var OutputAudioController = class {
-   constructor(loggerName) {
-     this.logger = new Logger(loggerName);
-   }
- };
-
- // src/react/utility/audio/AudioElementOutputAudioController.ts
- var AudioElementOutputAudioController = class extends OutputAudioController {
-   constructor() {
-     super("@m4trix/core > WebApiOutputAudioController");
-     // ─── Playback state ──────────────────────────────────────────────────────
-     this.currentHtmlAudio = null;
-     this.currentAudioUrl = null;
-   }
-   // ─── One-shot playback ────────────────────────────────────────────────────
-   /**
-    * Play either a Blob or a URL string.
-    * Uses <audio> under the hood for maximum browser compatibility.
-    */
-   async playAudio({
-     source,
-     onComplete
-   }) {
-     if (this.currentHtmlAudio) {
-       this.currentHtmlAudio.pause();
-       this.currentHtmlAudio.src = "";
-       if (this.currentAudioUrl && source instanceof Blob) {
-         URL.revokeObjectURL(this.currentAudioUrl);
-       }
-     }
-     const audio = new Audio();
-     this.currentHtmlAudio = audio;
-     let url;
-     if (source instanceof Blob) {
-       url = URL.createObjectURL(source);
-       this.currentAudioUrl = url;
-       audio.onended = () => {
-         URL.revokeObjectURL(url);
-         onComplete?.();
-       };
-     } else {
-       url = source;
-     }
-     audio.src = url;
-     try {
-       await audio.play();
-     } catch (err) {
-       this.logger.error("Playback failed, user gesture may be required", err);
-     }
-   }
-   // ─── Streaming playback ──────────────────────────────────────────────────
-   /**
-    * Stream audio from a Response via MediaSource Extensions.
-    * @param params.response The fetch Response whose body is an audio stream
-    * @param params.mimeCodec MIME type+codec string, e.g. 'audio/mpeg'
-    * @param params.onComplete Optional callback once the stream ends
-    */
-   async playAudioStream({
-     response,
-     mimeCodec = "audio/mpeg",
-     onComplete
-   }) {
-     if (!response.ok || !response.body) {
-       throw new Error(`Invalid response (${response.status})`);
-     }
-     if (typeof MediaSource === "undefined" || !MediaSource.isTypeSupported(mimeCodec)) {
-       throw new Error(`Unsupported MIME type or codec: ${mimeCodec}`);
-     }
-     await this.stopPlayback();
-     const mediaSource = new MediaSource();
-     const url = URL.createObjectURL(mediaSource);
-     this.currentAudioUrl = url;
-     const audio = new Audio(url);
-     this.currentHtmlAudio = audio;
-     audio.autoplay = true;
-     audio.onended = () => {
-       URL.revokeObjectURL(url);
-       this.currentAudioUrl = null;
-       onComplete?.();
-     };
-     mediaSource.addEventListener(
-       "sourceopen",
-       () => {
-         const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
-         const reader = response.body.getReader();
-         const pump = async () => {
-           const { done, value } = await reader.read();
-           if (done) {
-             mediaSource.endOfStream();
-             return;
-           }
-           if (value) {
-             sourceBuffer.appendBuffer(value);
-           }
-           if (sourceBuffer.updating) {
-             sourceBuffer.addEventListener("updateend", pump, { once: true });
-           } else {
-             pump();
-           }
-         };
-         pump();
-       },
-       { once: true }
-     );
-     try {
-       await audio.play();
-     } catch (err) {
-       this.logger.error(
-         "Streaming playback failed, user gesture may be required",
-         err
-       );
-     }
-   }
-   // ─── Chunk-based streaming playback ─────────────────────────────────────
-   /**
-    * Initialize a streaming audio context for chunk-based playback.
-    * This creates the necessary MediaSource and SourceBuffer for subsequent chunk additions.
-    * Returns functions to add chunks and end the stream, encapsulated in a closure.
-    *
-    * @param mimeCodec MIME type+codec string, e.g. 'audio/mpeg'
-    * @param onComplete Optional callback once the stream ends
-    * @returns Object containing functions to add chunks and end the stream
-    */
-   async initializeChunkStream({
-     onComplete,
-     mimeCodec = "audio/mpeg"
-   }) {
-     this.logger.debug(`Initializing chunk stream with codec: ${mimeCodec}`);
-     if (typeof MediaSource === "undefined") {
-       throw new Error("MediaSource API is not supported in this browser");
-     }
-     if (!MediaSource.isTypeSupported(mimeCodec)) {
-       this.logger.warn(
-         `Codec ${mimeCodec} not supported, falling back to standard audio/mpeg`
-       );
-       mimeCodec = "audio/mpeg";
-       if (!MediaSource.isTypeSupported(mimeCodec)) {
-         throw new Error(
-           "Neither the specified codec nor the fallback codec are supported"
-         );
-       }
-     }
-     await this.stopPlayback();
-     const mediaSource = new MediaSource();
-     let sourceBuffer = null;
-     const url = URL.createObjectURL(mediaSource);
-     this.currentAudioUrl = url;
-     const audio = new Audio(url);
-     this.currentHtmlAudio = audio;
-     audio.autoplay = false;
-     audio.controls = true;
-     audio.style.display = "none";
-     document.body.appendChild(audio);
-     let playbackStarted = false;
-     let hasReceivedFirstChunk = false;
-     let receivedChunksCount = 0;
-     const pendingChunks = [];
-     let isProcessingQueue = false;
-     this.logger.debug("Waiting for MediaSource to open...");
-     await new Promise((resolve, reject) => {
-       const timeout = setTimeout(() => {
-         reject(new Error("MediaSource failed to open (timeout)"));
-       }, 5e3);
-       mediaSource.addEventListener(
-         "sourceopen",
-         () => {
-           clearTimeout(timeout);
-           this.logger.debug("MediaSource open event received");
-           try {
-             sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
-             if (mediaSource.duration === Infinity || isNaN(mediaSource.duration)) {
-               mediaSource.duration = 1e3;
-             }
-             this.logger.debug("SourceBuffer created successfully");
-             resolve();
-           } catch (err) {
-             reject(new Error(`Failed to create SourceBuffer: ${err}`));
-           }
-         },
-         { once: true }
-       );
-     });
-     const logger = this.logger;
-     const processQueue = async () => {
-       if (!sourceBuffer || pendingChunks.length === 0 || isProcessingQueue) {
-         return;
-       }
-       isProcessingQueue = true;
-       try {
-         while (pendingChunks.length > 0) {
-           if (sourceBuffer.updating) {
-             await new Promise((resolve) => {
-               sourceBuffer.addEventListener("updateend", () => resolve(), {
-                 once: true
-               });
-             });
-           }
-           const nextChunk = pendingChunks.shift();
-           if (!nextChunk)
-             continue;
-           try {
-             sourceBuffer.appendBuffer(nextChunk);
-             logger.debug(
-               `Processed queued chunk of size ${nextChunk.byteLength}`
-             );
-             if (!playbackStarted && hasReceivedFirstChunk) {
-               await tryStartPlayback();
-             }
-             await new Promise((resolve) => {
-               sourceBuffer.addEventListener("updateend", () => resolve(), {
-                 once: true
-               });
-             });
-           } catch (err) {
-             logger.error("Error appending queued chunk to source buffer", err);
-           }
-         }
-       } finally {
-         isProcessingQueue = false;
-       }
-     };
-     const tryStartPlayback = async () => {
-       if (playbackStarted)
-         return;
-       playbackStarted = true;
-       logger.debug("Attempting to start audio playback...");
-       if (receivedChunksCount < 3 && audio.buffered.length > 0 && audio.buffered.end(0) < 0.5) {
-         logger.debug("Not enough data buffered yet, delaying playback");
-         return;
-       }
-       try {
-         if (audio.readyState === 0) {
-           logger.debug(
-             "Audio element not ready yet, waiting for canplay event"
-           );
-           await new Promise((resolve) => {
-             audio.addEventListener("canplay", () => resolve(), { once: true });
-           });
-         }
-         await audio.play();
-         logger.debug("Successfully started audio playback");
-       } catch (err) {
-         logger.error("Failed to start playback", err);
-         document.addEventListener(
-           "click",
-           async () => {
-             try {
-               await audio.play();
-               logger.debug("Started playback after user interaction");
-             } catch (innerErr) {
-               logger.error(
-                 "Still failed to play after user interaction",
-                 innerErr
-               );
-             }
-           },
-           { once: true }
-         );
-       }
-     };
-     const addChunkToStream = async (chunk) => {
-       if (!sourceBuffer) {
-         throw new Error(
-           "Streaming context was closed or not properly initialized."
-         );
-       }
-       let arrayBufferChunk;
-       if (chunk instanceof Blob) {
-         logger.debug("Converting Blob to ArrayBuffer");
-         arrayBufferChunk = await chunk.arrayBuffer();
-       } else {
-         arrayBufferChunk = chunk;
-       }
-       if (!arrayBufferChunk || arrayBufferChunk.byteLength === 0) {
-         logger.warn("Received empty chunk, skipping");
-         return;
-       }
-       if (!hasReceivedFirstChunk) {
-         hasReceivedFirstChunk = true;
-         logger.debug(
-           `First chunk received, size: ${arrayBufferChunk.byteLength} bytes`
-         );
-       }
-       receivedChunksCount++;
-       pendingChunks.push(arrayBufferChunk);
-       logger.debug(
-         `Added chunk #${receivedChunksCount} to queue (size: ${arrayBufferChunk.byteLength} bytes)`
-       );
-       await processQueue();
-       if (!playbackStarted && hasReceivedFirstChunk && receivedChunksCount >= 3) {
-         await tryStartPlayback();
-       }
-     };
-     const endChunkStream = () => {
-       if (mediaSource && mediaSource.readyState === "open") {
-         try {
-           if (pendingChunks.length > 0 || sourceBuffer && sourceBuffer.updating) {
-             logger.debug("Waiting for pending chunks before ending stream");
-             setTimeout(() => endChunkStream(), 200);
-             return;
-           }
-           if (hasReceivedFirstChunk) {
-             mediaSource.endOfStream();
-             logger.debug("MediaSource stream ended successfully");
-           } else {
-             logger.warn("Stream ended without receiving any chunks");
-           }
-         } catch (err) {
-           logger.error("Error ending MediaSource stream", err);
-         }
-       }
-       audio.onended = null;
-       if (audio.parentNode) {
-         audio.parentNode.removeChild(audio);
-       }
-       if (this.currentAudioUrl === url) {
-         this.currentAudioUrl = null;
-         URL.revokeObjectURL(url);
-       }
-       sourceBuffer = null;
-     };
-     audio.onended = () => {
-       logger.debug("Audio playback completed");
-       endChunkStream();
-       onComplete?.();
-     };
-     return {
-       addChunkToStream,
-       endChunkStream
-     };
-   }
-   /**
-    * Stop any ongoing HTMLAudioElement playback.
-    */
-   async stopPlayback() {
-     if (this.currentHtmlAudio) {
-       try {
-         this.currentHtmlAudio.pause();
-         this.currentHtmlAudio.src = "";
-       } catch (err) {
-         this.logger.error("Error stopping playback", err);
-       }
-       this.currentHtmlAudio = null;
-     }
-     if (this.currentAudioUrl) {
-       URL.revokeObjectURL(this.currentAudioUrl);
-       this.currentAudioUrl = null;
-     }
-   }
-   /**
-    * Cleans up all audio playback resources.
-    */
-   cleanup() {
-     this.stopPlayback();
-   }
- };
-
- // src/react/hooks/use-conversation/useConversation.ts
- Logger.enableGlobalLogging();
- function useConversation(endpoint, {
-   onStartRecording,
-   onStopRecording,
-   onReceive,
-   autoPlay = true,
-   downstreamMode = "STREAM",
-   onError,
-   audioConfig = {},
-   requestData = {},
-   endpointConfig = {}
- }) {
-   const { current: logger } = react.useRef(
-     new Logger("@m4trix/core > useConversation")
-   );
-   const inputAudioControllerRef = react.useRef(void 0);
-   const outputAudioControllerRef = react.useRef(
-     void 0
-   );
-   const endpointAdapterRef = react.useRef(
-     void 0
-   );
-   const [voiceAgentState, setVoiceAgentState] = react.useState("READY");
-   const [error, setError] = react.useState(null);
-   const handleError = react.useCallback(
-     (state2, err) => {
-       setError(err);
-       logger.error(`Error during ${state2}:`, err);
-       onError?.(state2, err);
-     },
-     [onError]
-   );
-   const startRecording = react.useCallback(() => {
-     if (inputAudioControllerRef.current) {
-       try {
-         logger.debug("Starting recording");
-         setVoiceAgentState("RECORDING");
-         inputAudioControllerRef.current.startRecording({
-           onError: (err) => {
-             handleError("RECORDING", err);
-           }
-         });
-         onStartRecording?.();
-       } catch (err) {
-         if (err instanceof Error) {
-           handleError("RECORDING", err);
-         }
-       }
-     }
-   }, [onStartRecording, handleError]);
-   const stopRecording = react.useCallback(async () => {
-     if (inputAudioControllerRef.current) {
-       try {
-         logger.debug("Stopping recording");
-         await inputAudioControllerRef.current.stopRecording({
-           onRecordingCompleted: async (allData) => {
-             setVoiceAgentState("PROCESSING");
-             try {
-               const response = await endpointAdapterRef.current?.sendVoiceFile({
-                 blob: allData,
-                 metadata: requestData
-               });
-               if (!response) {
-                 throw new Error("No response received from endpoint");
-               }
-               setVoiceAgentState("RESPONDING");
-               if (autoPlay) {
-                 if (downstreamMode === "STREAM") {
-                   await outputAudioControllerRef.current?.playAudioStream({
-                     response,
-                     onComplete: () => {
-                       setVoiceAgentState("READY");
-                     }
-                   });
-                 } else if (downstreamMode === "DOWNLOAD") {
-                   const responseBlob = await response.blob();
-                   await outputAudioControllerRef.current?.playAudio({
-                     source: responseBlob,
-                     onComplete: () => {
-                       setVoiceAgentState("READY");
-                     }
-                   });
-                 }
-               } else {
-                 setVoiceAgentState("READY");
-               }
-               onReceive?.(
-                 allData,
-                 async () => {
-                   if (outputAudioControllerRef.current) {
-                     if (downstreamMode === "STREAM") {
-                       return outputAudioControllerRef.current.playAudioStream({
-                         response,
-                         onComplete: () => {
-                           setVoiceAgentState("READY");
-                         }
-                       });
-                     } else {
-                       const responseBlob = await response.blob();
-                       return outputAudioControllerRef.current.playAudio({
-                         source: responseBlob,
-                         onComplete: () => {
-                           setVoiceAgentState("READY");
-                         }
-                       });
-                     }
-                   }
-                 },
-                 async () => {
-                   if (outputAudioControllerRef.current) {
-                     return outputAudioControllerRef.current.stopPlayback();
-                   }
-                 }
-               );
-             } catch (err) {
-               if (err instanceof Error) {
-                 handleError("PROCESSING", err);
-               }
-               setVoiceAgentState("READY");
-             }
-           }
-         });
-         onStopRecording?.();
-       } catch (err) {
-         if (err instanceof Error) {
-           handleError("RECORDING", err);
-         }
-       }
-     }
-   }, [
-     onStopRecording,
-     requestData,
-     autoPlay,
-     downstreamMode,
-     handleError,
-     onReceive
-   ]);
-   react.useEffect(() => {
-     if (endpointAdapterRef.current) {
-       return;
-     }
-     try {
-       const endpointAdapter = endpointConfig.endpointAdapter ? endpointConfig.endpointAdapter : new BaseVoiceEndpointAdapter({
-         baseUrl: endpointConfig.baseUrl,
-         endpoint,
-         headers: endpointConfig.headers
-       });
-       endpointAdapterRef.current = endpointAdapter;
-       if (!inputAudioControllerRef.current) {
-         inputAudioControllerRef.current = new WebAudioInputAudioController(
-           audioConfig
-         );
-       }
-       if (!outputAudioControllerRef.current) {
-         outputAudioControllerRef.current = new AudioElementOutputAudioController();
-       }
-     } catch (err) {
-       if (err instanceof Error) {
-         handleError("READY", err);
-       }
-     }
-   }, [endpoint, endpointConfig, audioConfig, handleError]);
-   react.useEffect(() => {
-     return () => {
-       inputAudioControllerRef.current?.cleanup();
-       outputAudioControllerRef.current?.cleanup();
-     };
-   }, []);
-   return {
-     startRecording,
-     stopRecording,
-     voiceAgentState,
-     error,
-     audioContext: inputAudioControllerRef.current?.audioContext || null
-   };
- }
-
- // src/react/adapter/socket/VoiceSocketAdapter.ts
- var VoiceSocketAdapter = class {
-   constructor(config) {
-     this._isConnected = false;
-     this.logger = new Logger("@m4trix/core > VoiceSocketAdapter");
-     this.emitter = new Emitter();
-     this.config = config;
-   }
-   on(event, listener) {
-     this.emitter.on(event, listener);
-   }
-   off(event, listener) {
-     this.emitter.off(event, listener);
-   }
-   once(event, listener) {
-     this.emitter.once(event, listener);
-   }
-   emit(event, data) {
-     this.emitter.emit(event, data);
-   }
-   isConnected() {
-     return this._isConnected;
-   }
- };
- var Emitter = class {
-   constructor() {
-     this.target = new EventTarget();
-   }
-   on(type, listener) {
-     this.target.addEventListener(type, listener);
-   }
-   off(type, listener) {
-     this.target.removeEventListener(type, listener);
-   }
-   once(type, listener) {
-     const wrapper = (event) => {
-       this.off(type, wrapper);
-       listener(event.detail);
-     };
-     this.on(type, wrapper);
-   }
-   emit(type, detail) {
-     this.target.dispatchEvent(new CustomEvent(type, { detail }));
-   }
- };
- var VoiceSocketIOAdapter = class extends VoiceSocketAdapter {
-   constructor(config) {
-     super(config);
-     this.socket = null;
-   }
-   async connect() {
-     return new Promise((resolve, reject) => {
-       if (!this.socket) {
-         this.socket = socket_ioClient.io(this.config.baseUrl, {
-           extraHeaders: this.config.headers,
-           autoConnect: true
-         });
-       }
-       this.socket.on("connect", () => {
-         this._isConnected = true;
-         this.logger.debug("Connected to socket");
-         this.emit("connect");
-         resolve();
-       });
-       this.socket.on("disconnect", () => {
-         this._isConnected = false;
-         this.emit("disconnect");
-         this.logger.debug("Disconnected from socket");
-         if (this.config.autoReconnect)
-           this.connect();
-       });
-       this.socket.on("connect_error", (error) => {
-         this.logger.error("Error connecting to socket", error);
-         this.emit("error", error);
-         reject(error);
-       });
-       this.socket.on("voice:chunk_received", (chunk) => {
-         this.logger.debug("Received voice chunk", chunk.byteLength);
-         this.onVoiceChunkReceived(chunk);
-       });
-       this.socket.on("voice:received_end_of_response_stream", () => {
-         this.logger.debug("Received end of response stream");
-         this.onReceivedEndOfResponseStream();
-       });
-       this.socket.on("voice:file_received", (blob) => {
-         this.logger.debug("Received voice file");
-         this.onVoiceFileReceived(blob);
-       });
-       this.socket.on("control-message", (message) => {
-         this.logger.debug("Received control message", message);
-         this.emit("control-message", message);
-       });
-     });
-   }
-   disconnect() {
-     this.socket?.disconnect();
-     this.socket = null;
-     this._isConnected = false;
-   }
-   exposeSocket() {
-     return this.socket;
-   }
-   async sendVoiceChunk(chunk, metadata) {
-     this.logger.debug(
-       "Sending voice chunk %i",
-       chunk instanceof Blob ? chunk.size : chunk.byteLength
-     );
-     if (!this.socket || !this.isConnected)
-       throw new Error("Socket not connected");
-     let chunkToSend;
-     if (chunk instanceof Blob) {
-       chunkToSend = await chunk.arrayBuffer();
-     } else {
-       chunkToSend = chunk;
-     }
-     this.logger.debug("[Socket] Sending voice chunk", chunkToSend.byteLength);
-     this.socket.emit("voice:send_chunk", chunkToSend, metadata);
-     this.emit("chunk_sent", chunk);
-   }
-   sendVoiceFile(blob, metadata) {
-     this.logger.debug("Sending voice file", blob, metadata);
-     if (!this.socket || !this.isConnected)
-       throw new Error("Socket not connected");
-     this.socket.emit("voice:send_file", blob, metadata);
-     this.emit("file-sent", blob);
-   }
-   commitVoiceMessage() {
-     if (!this.socket || !this.isConnected)
-       throw new Error("Socket not connected");
-     this.socket.emit("voice:commit");
-   }
-   onVoiceChunkReceived(chunk) {
-     this.emit("chunk-received", chunk);
-   }
-   onVoiceFileReceived(blob) {
-     this.emit("file-received", blob);
-   }
-   onReceivedEndOfResponseStream() {
-     this.emit("received-end-of-response-stream");
-   }
- };
-
- // src/react/utility/audio/WebAudioOutputAudioController.ts
- var STREAM_SAMPLE_RATE = 24e3;
- var CHANNELS = 1;
- var SLICE_DURATION_S = 0.25;
- var FRAMES_PER_SLICE = Math.floor(STREAM_SAMPLE_RATE * SLICE_DURATION_S);
- var BYTES_PER_SLICE = FRAMES_PER_SLICE * 2;
- var SCHED_TOLERANCE = 0.05;
- var WebAudioOutputAudioController = class extends OutputAudioController {
-   constructor() {
-     super("@m4trix/core > WebAudioOutputAudioController");
-     this.audioCtx = new AudioContext();
-     this.gain = this.audioCtx.createGain();
-     this.nextPlayTime = 0;
-     this.activeSources = /* @__PURE__ */ new Set();
-     this.userGestureHookAttached = false;
-     this.gain.connect(this.audioCtx.destination);
-     this.resetScheduler();
-   }
-   // ─────────────────────────────────────────────────────────────────────
-   // One‑shot playback
-   // ─────────────────────────────────────────────────────────────────────
-   async playAudio({
-     source,
-     onComplete
-   }) {
-     await this.stopPlayback();
-     const buf = await this.sourceToArrayBuffer(source);
-     const decoded = await this.decode(buf);
-     await this.ensureContextRunning();
-     const src = this.createSource(decoded, this.audioCtx.currentTime);
-     src.onended = () => {
-       this.activeSources.delete(src);
-       onComplete?.();
-     };
-   }
-   async playAudioStream() {
-   }
-   // ─────────────────────────────────────────────────────────────────────
-   // PCM streaming
-   // ─────────────────────────────────────────────────────────────────────
-   async initializeChunkStream({
-     onComplete
-   }) {
-     await this.stopPlayback();
-     await this.ensureContextRunning();
-     this.resetScheduler();
-     let streamEnded = false;
-     let pending = new Uint8Array(0);
-     const addChunkToStream = async (pkt) => {
-       if (streamEnded) {
-         this.logger.warn("Attempt to add chunk after stream ended \u2013 ignoring.");
-         return;
-       }
-       const bytes = new Uint8Array(
-         pkt instanceof Blob ? await pkt.arrayBuffer() : pkt
-       );
-       if (bytes.length === 0)
-         return;
-       const merged = new Uint8Array(pending.length + bytes.length);
-       merged.set(pending);
-       merged.set(bytes, pending.length);
-       pending = merged;
-       if (pending.length % 2 === 1)
-         return;
-       while (pending.length >= BYTES_PER_SLICE) {
-         const sliceBytes = pending.slice(0, BYTES_PER_SLICE);
-         pending = pending.slice(BYTES_PER_SLICE);
-         const aligned = sliceBytes.buffer.slice(
-           sliceBytes.byteOffset,
-           sliceBytes.byteOffset + sliceBytes.byteLength
-         );
-         const int16 = new Int16Array(aligned);
-         const buf = this.audioCtx.createBuffer(
-           CHANNELS,
-           int16.length,
-           STREAM_SAMPLE_RATE
-         );
-         const data = buf.getChannelData(0);
-         for (let i = 0; i < int16.length; i++)
-           data[i] = int16[i] / 32768;
-         this.scheduleBuffer(buf);
-       }
-     };
-     const endChunkStream = () => {
-       if (streamEnded)
-         return;
-       streamEnded = true;
-       if (onComplete) {
-         if (this.activeSources.size === 0)
-           onComplete();
-         else {
-           const last = Array.from(this.activeSources).pop();
-           if (last) {
-             const prev = last.onended;
-             last.onended = (e) => {
-               if (prev)
-                 prev.call(last, e);
-               onComplete();
-             };
-           }
-         }
-       }
-     };
-     return { addChunkToStream, endChunkStream };
-   }
-   // ─────────────────────────────────────────────────────────────────────
-   // Buffer scheduling helpers
-   // ─────────────────────────────────────────────────────────────────────
-   scheduleBuffer(buf) {
-     if (this.nextPlayTime < this.audioCtx.currentTime + SCHED_TOLERANCE) {
-       this.nextPlayTime = this.audioCtx.currentTime + SCHED_TOLERANCE;
-     }
-     this.createSource(buf, this.nextPlayTime);
-     this.nextPlayTime += buf.duration;
-   }
-   createSource(buf, when) {
-     const src = this.audioCtx.createBufferSource();
-     src.buffer = buf;
-     src.connect(this.gain);
-     src.start(when);
-     this.activeSources.add(src);
-     src.onended = () => {
-       this.activeSources.delete(src);
-     };
-     return src;
-   }
-   resetScheduler() {
-     this.nextPlayTime = this.audioCtx.currentTime;
-   }
-   // ─── External resource helpers ───────────────────────────────────────
-   sourceToArrayBuffer(src) {
-     return typeof src === "string" ? fetch(src).then((r) => {
-       if (!r.ok)
-         throw new Error(`${r.status}`);
-       return r.arrayBuffer();
-     }) : src.arrayBuffer();
-   }
-   decode(buf) {
-     return new Promise(
-       (res, rej) => this.audioCtx.decodeAudioData(buf, res, rej)
-     );
-   }
-   // ─── Lifecycle methods ───────────────────────────────────────────────
-   async stopPlayback() {
-     for (const src of this.activeSources) {
-       try {
-         src.stop();
-       } catch {
-       }
-       src.disconnect();
-     }
-     this.activeSources.clear();
-     this.resetScheduler();
-   }
-   cleanup() {
-     this.stopPlayback();
-     if (this.audioCtx.state !== "closed")
-       this.audioCtx.close();
-   }
-   // ─── Autoplay‑policy helper ──────────────────────────────────────────
-   async ensureContextRunning() {
-     if (this.audioCtx.state !== "suspended")
-       return;
-     try {
-       await this.audioCtx.resume();
-     } catch {
-     }
-     if (this.audioCtx.state === "running")
-       return;
-     if (!this.userGestureHookAttached) {
-       this.userGestureHookAttached = true;
-       const resume = async () => {
-         try {
-           await this.audioCtx.resume();
-         } catch {
-         }
-         if (this.audioCtx.state === "running")
-           document.removeEventListener("click", resume);
-       };
-       document.addEventListener("click", resume);
-     }
-   }
- };
-
- // src/react/hooks/use-conversation/useSocketConversation.ts
- Logger.enableGlobalLogging();
- function useSocketConversation({
-   scope,
-   onStartRecording,
-   onStopRecording,
-   onReceive,
-   upstreamMode = "STREAM_WHILE_TALK",
-   onError,
-   audioConfig = {},
-   socketConfig = {}
- }) {
-   const { current: logger } = react.useRef(
-     new Logger("SuTr > useSocketConversation")
-   );
-   const inputAudioControllerRef = react.useRef(void 0);
-   const outputAudioControllerRef = react.useRef(
-     void 0
-   );
-   const socketAdapterRef = react.useRef(void 0);
-   const [socket, setSocket] = react.useState(null);
-   const [voiceAgentState, setVoiceAgentState] = react.useState("READY");
-   const [error, setError] = react.useState(null);
-   const shouldStreamWhileTalk = upstreamMode === "STREAM_WHILE_TALK";
-   const handleError = react.useCallback(
-     (state2, err) => {
-       setError(err);
-       logger.error(`Error during ${state2}:`, err);
-       onError?.(state2, err);
-     },
-     [onError]
-   );
-   const subscribeToSocketEventsForChunkDownstreaming = react.useCallback(
-     async (socketAdapter) => {
-       logger.debug("Setting up audio stream for receiving chunks");
-       try {
-         const { addChunkToStream, endChunkStream } = await outputAudioControllerRef.current.initializeChunkStream({
-           mimeCodec: "audio/mpeg",
-           onComplete: () => {
-             logger.debug("Audio stream playback completed");
-             setVoiceAgentState("READY");
-           }
-         });
-         let chunkCount = 0;
-         const chunkReceivedEmitter = async (chunk) => {
-           if (chunk instanceof ArrayBuffer) {
-             chunkCount++;
-             logger.debug(
-               `Received voice chunk #${chunkCount} from socket, size: ${chunk.byteLength} bytes`
-             );
-             if (!chunk || chunk.byteLength === 0) {
-               logger.warn("Received empty chunk, skipping");
-               return;
-             }
-             try {
-               await addChunkToStream(chunk);
-               logger.debug(
-                 `Successfully added chunk #${chunkCount} to audio stream`
-               );
-             } catch (err) {
-               logger.error(
-                 `Failed to add chunk #${chunkCount} to audio stream`,
-                 err
-               );
-               if (err instanceof Error) {
-                 handleError("DOWNSTREAMING", err);
-               }
-             }
-           }
-         };
-         socketAdapter.on("chunk-received", chunkReceivedEmitter);
-         const endOfStreamEmitter = () => {
-           logger.debug(
-             `Received end of stream signal after ${chunkCount} chunks, ending chunk stream`
-           );
-           endChunkStream();
-           setVoiceAgentState("READY");
-         };
-         socketAdapter.on("received-end-of-response-stream", endOfStreamEmitter);
-         return () => {
-           logger.debug("Cleaning up socket event listeners");
-           socketAdapter.off("chunk-received", chunkReceivedEmitter);
-           socketAdapter.off(
-             "received-end-of-response-stream",
-             endOfStreamEmitter
-           );
-           endChunkStream();
-         };
-       } catch (err) {
-         if (err instanceof Error) {
-           handleError("DOWNSTREAMING", err);
-         }
-         return () => {
-         };
-       }
-     },
-     [handleError]
-   );
-   const hookupSocketAdapter = react.useCallback(
-     async (socketAdapter) => {
-       logger.debug("Connecting to socket...");
-       try {
-         await socketAdapter.connect();
-         socketAdapter.on("connect", () => {
-           logger.debug("Socket adapter connected");
-           setVoiceAgentState("READY");
-         });
-         socketAdapter.on("disconnect", () => {
-           logger.debug("Socket adapter disconnected");
-         });
-         socketAdapter.on("error", (err) => {
-           if (err instanceof Error) {
-             handleError(voiceAgentState, err);
-           } else {
-             handleError(voiceAgentState, new Error("Unknown error"));
-           }
-         });
-         setSocket(socketAdapter.exposeSocket());
-       } catch (err) {
-         if (err instanceof Error) {
-           handleError("READY", err);
-         }
-       }
-     },
-     [handleError, voiceAgentState]
-   );
-   const startRecording = react.useCallback(() => {
-     if (inputAudioControllerRef.current) {
-       try {
-         logger.debug("Starting recording");
-         setVoiceAgentState("RECORDING");
-         inputAudioControllerRef.current.startRecording({
-           onRecordedChunk: async (chunk) => {
-             if (shouldStreamWhileTalk) {
-               try {
-                 await socketAdapterRef.current?.sendVoiceChunk(chunk);
-               } catch (err) {
-                 if (err instanceof Error) {
-                   handleError("RECORDING", err);
-                 }
-               }
-             }
-           }
-         });
-         onStartRecording?.();
-       } catch (err) {
-         if (err instanceof Error) {
-           handleError("RECORDING", err);
-         }
-       }
-     }
-   }, [onStartRecording, shouldStreamWhileTalk, handleError]);
-   const stopRecording = react.useCallback(async () => {
-     if (inputAudioControllerRef.current) {
-       try {
-         logger.debug("Stopping recording");
-         await inputAudioControllerRef.current.stopRecording({
-           onRecordingCompleted: async (allData) => {
-             setVoiceAgentState("PROCESSING");
-             try {
-               if (shouldStreamWhileTalk) {
-                 logger.debug("Committing voice message");
-                 await socketAdapterRef.current?.commitVoiceMessage();
-               } else {
-                 await socketAdapterRef.current?.sendVoiceFile(allData);
-               }
-               setVoiceAgentState("DOWNSTREAMING");
-               await subscribeToSocketEventsForChunkDownstreaming(
-                 socketAdapterRef.current
-               );
-               onReceive?.(
-                 allData,
-                 async () => {
-                   if (outputAudioControllerRef.current) {
-                     return outputAudioControllerRef.current.stopPlayback();
-                   }
-                 },
-                 async () => {
-                   if (outputAudioControllerRef.current) {
-                     return outputAudioControllerRef.current.stopPlayback();
-                   }
-                 }
-               );
-             } catch (err) {
-               if (err instanceof Error) {
-                 handleError("PROCESSING", err);
-               }
-             }
-           }
-         });
-         onStopRecording?.();
-       } catch (err) {
-         if (err instanceof Error) {
-           handleError("RECORDING", err);
-         }
-       }
-     }
-   }, [
-     onStopRecording,
-     handleError,
-     subscribeToSocketEventsForChunkDownstreaming,
-     onReceive
-   ]);
-   react.useEffect(() => {
-     if (socketAdapterRef.current) {
-       return;
-     }
-     try {
-       const socketAdapter = socketConfig.socketAdapter ? socketConfig.socketAdapter : new VoiceSocketIOAdapter({
-         scope,
-         baseUrl: socketConfig.baseUrl || "",
-         headers: socketConfig.headers
-       });
-       socketAdapterRef.current = socketAdapter;
-       if (!socketAdapter.isConnected()) {
-         hookupSocketAdapter(socketAdapter);
-       }
-       if (!inputAudioControllerRef.current) {
-         inputAudioControllerRef.current = new WebAudioInputAudioController(
-           audioConfig
-         );
-       }
-       if (!outputAudioControllerRef.current) {
-         outputAudioControllerRef.current = new WebAudioOutputAudioController();
-       }
-     } catch (err) {
-       if (err instanceof Error) {
-         handleError("READY", err);
-       }
-     }
-   }, [scope, socketConfig, hookupSocketAdapter, audioConfig, handleError]);
-   react.useEffect(() => {
-     return () => {
-       inputAudioControllerRef.current?.cleanup();
-       outputAudioControllerRef.current?.cleanup();
-       if (socketAdapterRef.current) {
-         socketAdapterRef.current.disconnect();
-         socketAdapterRef.current = void 0;
-       }
-     };
-   }, []);
-   return {
-     startRecording,
-     stopRecording,
-     voiceAgentState,
-     error,
-     audioContext: inputAudioControllerRef.current?.audioContext || null,
-     socket
-   };
- }
- var AiCursorComponentStyle = lit.css`
-   :host {
-     --ai-local-cursor-size: var(--sk-ai-cursor-size, 1rem);
-     --ai-local-cursor-label-padding: var(
-       --sk-ai-cursor-label-padding,
-       0.25rem 0.25rem
-     );
-     --ai-local-cursor-border-radius: var(--sk-ai-cursor-border-radius, 0.25rem);
-     --ai-local-label-offset: var(--sk-ai-cursor-label-offset, 1rem);
-
-     --ai-local-label-font-size: var(--sk-ai-cursor-label-font-size, 12px);
-     --ai-local-label-font-weight: var(--sk-ai-cursor-label-font-weight, bold);
-     --ai-local-label-color: var(--sk-ai-cursor-label-color, white);
-     --ai-local-label-background-color: var(
-       --sk-ai-cursor-label-background-color,
-       black
-     );
-     --ai-local-label-border-color: var(
-       --sk-ai-cursor-label-border-color,
-       white
-     );
-     --ai-local-label-border-width: var(
-       --sk-ai-cursor-label-border-width,
-       0.1rem
-     );
-
-     color: black;
-     stroke: white;
-     position: absolute;
-     /* Insetting in the parent element (body) */
-     top: 0;
-     left: 0;
-     bottom: 0;
-     right: 0;
-     pointer-events: none;
-     width: var(--ai-local-cursor-size);
-     height: var(--ai-local-cursor-size);
-   }
-
-   #cursor-graphic-parent {
-     position: absolute;
-     top: 0;
-     left: 0;
-   }
-
-   #label-text {
-     position: absolute;
-     color: white;
-     font-size: 12px;
-     font-weight: bold;
-     padding: var(--ai-local-cursor-label-padding);
-     border-radius: var(--ai-local-cursor-border-radius);
-
-     white-space: nowrap;
-     overflow: hidden;
-     text-overflow: ellipsis;
-
-     width: fit-content;
-     min-width: fit-content;
-     top: var(--ai-local-label-offset);
-     left: var(--ai-local-label-offset);
-
-     border: var(--ai-local-label-border-width) solid
-       var(--ai-local-label-border-color);
-     background-color: var(--ai-local-label-background-color);
-     color: var(--ai-local-label-color);
-     font-size: var(--ai-local-label-font-size);
-     font-weight: var(--ai-local-label-font-weight);
-   }
- `;
-
- // src/ui/ai-cursor/rendering/AiCursorComponent.ts
- var AiCursorComponent = class extends lit.LitElement {
-   constructor() {
-     super();
-     this.eventHooks = {
-       defineSetPosition: () => {
-       },
-       defineAddPositionToQueue: () => {
-       },
-       definePlayQueue: () => {
-       },
-       defineSetShowCursor: () => {
-       }
-     };
-     this.isShowingCursor = true;
-     this.labelText = "AI Cursor";
-     this.cursorPosition = [0, 0];
-     this._cursorRef = ref_js.createRef();
-     this._labelRef = ref_js.createRef();
-   }
-   updated(_changedProperties) {
-     if (_changedProperties.has("_cursorRef")) {
-       if (this._cursorRef.value) {
-         this.hookUpCallbacks();
-       } else {
-         this._timeline?.pause();
-         this._timeline?.refresh();
-       }
-     }
-     super.updated(_changedProperties);
-   }
-   render() {
-     const cursorSvg = lit.html`
-       <svg
-         width=${24}
-         height=${24}
-         viewBox="0 0 100 100"
-         fill="none"
-         xmlns="http://www.w3.org/2000/svg"
-       >
-         <g clip-path="url(#clip0_3576_285)">
-           <path
-             class="cursor-path"
-             d="M2.14849 7.04749C1.35153 4.07321 4.07319 1.35155 7.04747 2.14851L77.3148 20.9766C80.2891 21.7735 81.2853 25.4914 79.108 27.6687L27.6687 79.108C25.4914 81.2853 21.7735 80.2891 20.9766 77.3149L2.14849 7.04749Z"
-             fill="currentColor"
-           />
-         </g>
-         <defs>
-           <clipPath id="clip0_3576_285">
-             <rect width="100" height="100" fill="white" />
-           </clipPath>
-         </defs>
-       </svg>
-     `;
-     return lit.html`
-       <span
-         id="cursor-graphic-parent"
-         ${ref_js.ref(this._cursorRef)}
-         ?hidden=${!this.isShowingCursor}
-       >
-         ${cursorSvg}
-         <span
-           ${ref_js.ref(this._labelRef)}
-           id="label-text"
-           ?hidden=${!this.isShowingCursor}
-         >${this.labelText}</span
-         >
-       </span>
-     `;
-   }
-   // private methods
-   /**
-    * The primary way to control the cursor is using an external API.
-    * This interface exposes controlling methods. The Lit Component itself is
-    * intended to be a controlled component.
-    */
-   hookUpCallbacks() {
-     const animationTarget = this._cursorRef.value;
-     if (!animationTarget) {
-       return;
-     }
-     this._timeline = animejs.createTimeline({ defaults: { duration: 750 } });
-     if (!this._timeline) {
-       return;
-     }
-     this.eventHooks.defineSetPosition((position) => {
-       this._timeline?.add(animationTarget, {
-         translateX: position[0],
-         translateY: position[1],
-         duration: 1
-       });
-       this._timeline?.play();
-     });
-     this.eventHooks.defineAddPositionToQueue((position) => {
-       this._timeline?.add(animationTarget, {
-         translateX: position[0],
-         translateY: position[1],
-         duration: 1e3
-       });
-     });
-     this.eventHooks.defineSetShowCursor((show) => {
-       this.isShowingCursor = show;
-     });
-     this.eventHooks.definePlayQueue(() => {
-       this._timeline?.play();
-     });
-   }
-   // Getters
-   get cursorRef() {
-     return this._cursorRef.value;
-   }
-   get labelRef() {
-     return this._labelRef.value;
-   }
- };
- // Define scoped styles right with your component, in plain CSS
- AiCursorComponent.styles = AiCursorComponentStyle;
- __decorateClass([
-   decorators_js.property({
-     type: Object
-   })
- ], AiCursorComponent.prototype, "eventHooks", 2);
- __decorateClass([
-   decorators_js.property({ type: Boolean })
- ], AiCursorComponent.prototype, "isShowingCursor", 2);
- __decorateClass([
-   decorators_js.property({ type: String })
- ], AiCursorComponent.prototype, "labelText", 2);
- __decorateClass([
-   decorators_js.property({ type: Array })
- ], AiCursorComponent.prototype, "cursorPosition", 2);
- __decorateClass([
-   decorators_js.state()
- ], AiCursorComponent.prototype, "_cursorRef", 2);
- __decorateClass([
-   decorators_js.state()
- ], AiCursorComponent.prototype, "_labelRef", 2);
- AiCursorComponent = __decorateClass([
-   decorators_js.customElement("ai-cursor")
- ], AiCursorComponent);
-
- // src/ui/ai-cursor/rendering/index.ts
- var mountAiCursor = (aiCursorProps) => {
-   const root = document.body;
-   const cursor = document.createElement("ai-cursor");
-   cursor.eventHooks = aiCursorProps.eventHooks;
-   root.appendChild(cursor);
- };
-
- // src/ui/ai-cursor/AiCursor.ts
- var AiCursor = class _AiCursor {
-   constructor() {
-   }
-   // Static constructors
-   static spawn() {
-     const newCursor = new _AiCursor();
-     newCursor.mount();
-     return newCursor;
-   }
-   jumpTo(target) {
-     const position = targetToPosition(target);
-     if (position) {
-       this.setPosition?.(position);
-     }
-   }
-   moveTo(target) {
-     const position = targetToPosition(target);
-     if (position) {
-       this.addPositionToQueue?.(position);
-       this.playQueue?.();
-     }
-   }
-   scheduleMoves(targets) {
-     targets.forEach((target) => {
-       const position = targetToPosition(target);
-       if (position) {
-         this.addPositionToQueue?.(position);
-       }
-     });
-     this.playQueue?.();
-   }
-   show() {
-     this.setShowCursor?.(true);
-   }
-   hide() {
-     this.setShowCursor?.(false);
-   }
-   mount() {
-     mountAiCursor({
-       eventHooks: {
-         defineSetPosition: (callback) => {
-           this.setPosition = callback;
-         },
-         defineAddPositionToQueue: (callback) => {
-           this.addPositionToQueue = callback;
-         },
-         definePlayQueue: (callback) => {
-           this.playQueue = callback;
-         },
-         defineSetShowCursor: (callback) => {
-           this.setShowCursor = callback;
-         }
-       }
-     });
-   }
- };
- function calculateClickPositionFromElement(element) {
-   const rect = element.getBoundingClientRect();
-   return [rect.left + rect.width / 2, rect.top + rect.height / 2];
- }
- function targetToPosition(target) {
-   if (Array.isArray(target) && target.length === 2 && typeof target[0] === "number" && typeof target[1] === "number") {
-     return target;
-   } else if (target instanceof HTMLElement) {
-     return calculateClickPositionFromElement(target);
-   } else if (typeof target === "string") {
-     const element = document.querySelector(target);
-     if (element) {
-       return calculateClickPositionFromElement(element);
-     }
-   }
-   return void 0;
- }
-
  // src/stream/Pump.ts
  var Pump = class _Pump {
    constructor(src) {
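
Note: for orientation, the removed useConversation hook above was consumed roughly as follows. This is a sketch reconstructed from the hook's signature; the import path, component, and endpoint are illustrative, not from the package docs.

    import { useConversation } from "@m4trix/core"; // assumed export path

    function PushToTalkButton() {
      const { startRecording, stopRecording, voiceAgentState, error } =
        useConversation("/api/voice", {
          autoPlay: true,
          downstreamMode: "STREAM", // "DOWNLOAD" buffers the whole response instead
          onError: (state, err) => console.error(`[${state}]`, err)
        });
      // Wire startRecording/stopRecording to pointer-down/up; the state machine
      // cycles READY -> RECORDING -> PROCESSING -> RESPONDING -> READY.
      return null;
    }
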
@@ -2827,7 +1208,16 @@ var createEventPlane = (options) => effect.Effect.gen(function* () {
    };
    const publishToPubSub = (channel, envelope) => effect.PubSub.publish(getPubsub(channel), envelope);
    const publish = (channel, envelope) => effect.Effect.sync(() => recordEvent(envelope)).pipe(
-     effect.Effect.flatMap(() => publishToPubSub(channel, envelope))
+     effect.Effect.flatMap(() => publishToPubSub(channel, envelope)),
+     effect.Effect.withSpan("event.publish", {
+       attributes: {
+         "event.name": envelope.name,
+         "event.payload": payloadForSpan(envelope.payload),
+         channel,
+         runId: envelope.meta.runId,
+         contextId: envelope.meta.contextId
+       }
+     })
    );
    const publishToChannels = (targetChannels, envelope) => effect.Effect.sync(() => recordEvent(envelope)).pipe(
      effect.Effect.flatMap(
@@ -2836,7 +1226,15 @@ var createEventPlane = (options) => effect.Effect.gen(function* () {
          { concurrency: "unbounded" }
        )
      ),
-     effect.Effect.map((results) => results.every(Boolean))
+     effect.Effect.map((results) => results.every(Boolean)),
+     effect.Effect.withSpan("event.publish", {
+       attributes: {
+         "event.name": envelope.name,
+         "event.payload": payloadForSpan(envelope.payload),
+         runId: envelope.meta.runId,
+         contextId: envelope.meta.contextId
+       }
+     })
    );
    const subscribe = (channel) => effect.PubSub.subscribe(getPubsub(channel));
    const getRunEvents = (runId, contextId) => {
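
Note: both publish paths are now wrapped in Effect.withSpan from the effect package. A minimal sketch of the same pattern, using only the public effect API (attribute values here are illustrative):

    import { Effect } from "effect";

    // withSpan as a pipe combinator, mirroring the publish() pipeline above.
    const tracedPublish = Effect.sync(() => true).pipe(
      Effect.withSpan("event.publish", {
        attributes: { "event.name": "demo.event", channel: "main" }
      })
    );
    // With no tracer provided the span is a no-op; supplying a tracing layer
    // (see the tracingLayer option further down) exports it for real.
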
@@ -2869,8 +1267,17 @@ var createEventPlane = (options) => effect.Effect.gen(function* () {
      shutdown
    };
  });
- var runSubscriber = (agent, publishesTo, dequeue, plane, emitQueue) => effect.Effect.gen(function* () {
+ function payloadForSpan(payload, maxLen = 500) {
+   try {
+     const s = JSON.stringify(payload);
+     return s.length > maxLen ? `${s.slice(0, maxLen)}...` : s;
+   } catch {
+     return String(payload);
+   }
+ }
+ var runSubscriber = (agent, publishesTo, dequeue, plane, emitQueue, channelName) => effect.Effect.gen(function* () {
    const listensTo = agent.getListensTo?.() ?? [];
+   const agentId = agent.getId();
    const processOne = () => effect.Effect.gen(function* () {
      const envelope = yield* effect.Queue.take(dequeue);
      if (listensTo.length > 0 && !listensTo.includes(envelope.name)) {
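
Note: the new payloadForSpan helper keeps span attributes bounded. It JSON-stringifies the payload, truncates the result to 500 characters, and falls back to String(payload) when serialization throws (circular structures, BigInt, and so on). Expected behavior, for illustration:

    payloadForSpan({ ok: true });           // '{"ok":true}'
    payloadForSpan("x".repeat(600)).length; // 503: 500 kept chars plus "..."
    const loop: any = {}; loop.self = loop;
    payloadForSpan(loop);                   // "[object Object]" via the String() fallback
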
@@ -2881,34 +1288,51 @@ var runSubscriber = (agent, publishesTo, dequeue, plane, emitQueue) => effect.Ef
        envelope.meta.contextId
      );
      const contextEvents = plane.getContextEvents(envelope.meta.contextId);
-     yield* effect.Effect.tryPromise({
-       try: () => agent.invoke({
-         triggerEvent: envelope,
-         emit: (userEvent) => {
-           const fullEnvelope = {
-             name: userEvent.name,
-             meta: envelope.meta,
-             payload: userEvent.payload
-           };
-           if (emitQueue) {
-             effect.Effect.runPromise(
-               effect.Queue.offer(emitQueue, {
-                 channels: publishesTo,
-                 envelope: fullEnvelope
-               })
-             ).catch(() => {
-             });
-           } else {
-             effect.Effect.runFork(
-               plane.publishToChannels(publishesTo, fullEnvelope)
-             );
-           }
-         },
-         runEvents,
-         contextEvents
-       }),
-       catch: (e) => e
-     });
+     yield* effect.Effect.withSpan("agent.listen", {
+       attributes: {
+         agentId,
+         "event.name": envelope.name,
+         "event.payload": payloadForSpan(envelope.payload),
+         ...channelName !== void 0 && { channel: channelName }
+       }
+     })(
+       effect.Effect.withSpan("agent.invoke", {
+         attributes: {
+           agentId,
+           "event.name": envelope.name,
+           "event.payload": payloadForSpan(envelope.payload)
+         }
+       })(
+         effect.Effect.tryPromise({
+           try: () => agent.invoke({
+             triggerEvent: envelope,
+             emit: (userEvent) => {
+               const fullEnvelope = {
+                 name: userEvent.name,
+                 meta: envelope.meta,
+                 payload: userEvent.payload
+               };
+               if (emitQueue) {
+                 effect.Effect.runPromise(
+                   effect.Queue.offer(emitQueue, {
+                     channels: publishesTo,
+                     envelope: fullEnvelope
+                   })
+                 ).catch(() => {
+                 });
+               } else {
+                 effect.Effect.runFork(
+                   plane.publishToChannels(publishesTo, fullEnvelope)
+                 );
+               }
+             },
+             runEvents,
+             contextEvents
+           }),
+           catch: (e) => e
+         })
+       )
+     );
    }).pipe(
      effect.Effect.catchAllCause(
        (cause) => effect.Cause.isInterrupted(cause) ? effect.Effect.void : effect.Effect.sync(() => {
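
Note: Effect.withSpan(name, options) is used here in its curried (data-last) form, so the two spans nest: agent.listen is the parent of agent.invoke, which in turn parents whatever the agent traces internally. A reduced sketch of the nesting, with a placeholder effect standing in for the agent call:

    import { Effect } from "effect";

    const invokeAgent = Effect.promise(() => Promise.resolve("done")); // placeholder
    const traced = Effect.withSpan("agent.listen", { attributes: { agentId: "a1" } })(
      Effect.withSpan("agent.invoke", { attributes: { agentId: "a1" } })(invokeAgent)
    );
    // Resulting trace tree: agent.listen -> agent.invoke -> (agent-internal spans)
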
@@ -2930,7 +1354,8 @@ var run = (network, plane, options) => effect.Effect.gen(function* () {
          reg.publishesTo,
          dequeue,
          plane,
-         emitQueue
+         emitQueue,
+         channel.name
        );
      }
    }
@@ -3004,7 +1429,8 @@ function expose(network, options) {
      select,
      plane: providedPlane,
      onRequest,
-     triggerEvents
+     triggerEvents,
+     tracingLayer
    } = options;
    const triggerEventDef = triggerEvents?.[0];
    const triggerEventName = triggerEventDef?.name ?? "request";
@@ -3018,7 +1444,7 @@ function expose(network, options) {
      const payload = await extractPayload(req);
      const signal = req.request?.signal;
      const program = effect.Effect.gen(function* () {
-       const plane = providedPlane ?? (yield* createEventPlane({ network }));
+       const plane = providedPlane ?? (yield* createEventPlane({ network, store: network.getStore() }));
        if (!providedPlane) {
          const emitQueue = yield* effect.Queue.unbounded();
          yield* effect.Effect.fork(
@@ -3086,7 +1512,8 @@ function expose(network, options) {
3086
1512
  }
3087
1513
  return stream;
3088
1514
  });
3089
- return effect.Effect.runPromise(program.pipe(effect.Effect.scoped));
1515
+ const runnable = tracingLayer ? program.pipe(effect.Effect.provide(tracingLayer), effect.Effect.scoped) : program.pipe(effect.Effect.scoped);
1516
+ return effect.Effect.runPromise(runnable);
3090
1517
  };
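
When a tracingLayer is supplied, the request program is piped through Effect.provide(tracingLayer) before being scoped and run; otherwise it runs exactly as before. The same pattern in isolation:

    const { Effect } = require("effect");
    const { consoleTracerLayer } = require("@m4trix/core");

    const tracingLayer = consoleTracerLayer; // or undefined to run untraced
    const program = Effect.withSpan("request")(Effect.succeed("ok"));

    const runnable = tracingLayer
      ? program.pipe(Effect.provide(tracingLayer), Effect.scoped)
      : program.pipe(Effect.scoped);

    Effect.runPromise(runnable).then(console.log); // "ok"
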
3091
1518
  return {
3092
1519
  protocol: "sse",
@@ -3118,6 +1545,7 @@ var AgentNetwork = class _AgentNetwork {
3118
1545
  this.channels = /* @__PURE__ */ new Map();
3119
1546
  this.agentRegistrations = /* @__PURE__ */ new Map();
3120
1547
  this.spawnerRegistrations = [];
1548
+ this._store = createInMemoryNetworkStore();
3121
1549
  }
3122
1550
  /* ─── Public Static Factory ─── */
3123
1551
  static setup(callback) {
@@ -3205,6 +1633,10 @@ var AgentNetwork = class _AgentNetwork {
3205
1633
  getSpawnerRegistrations() {
3206
1634
  return this.spawnerRegistrations;
3207
1635
  }
1636
+ /** Store defined at network setup time. Shared across all event planes created for this network. */
1637
+ getStore() {
1638
+ return this._store;
1639
+ }
3208
1640
  /**
3209
1641
  * Expose the network as a streamable API (e.g. SSE). Returns an ExposedAPI
3210
1642
  * that adapters (NextEndpoint, ExpressEndpoint) consume to produce streamed
@@ -3230,7 +1662,11 @@ var AgentNetwork = class _AgentNetwork {
3230
1662
  }
3231
1663
  runScoped(network, capacity) {
3232
1664
  return effect.Effect.gen(function* () {
3233
- const plane = yield* createEventPlane({ network, capacity });
1665
+ const plane = yield* createEventPlane({
1666
+ network,
1667
+ capacity,
1668
+ store: network.getStore()
1669
+ });
3234
1670
  yield* effect.Effect.fork(run(network, plane));
3235
1671
  return plane;
3236
1672
  });
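
The store introduced in this release is created once per network (createInMemoryNetworkStore() in the constructor, defined earlier in the bundle) and handed to every event plane via getStore(), both here in runScoped and in expose() above. A short sketch -- the setup callback's parameters sit outside this diff, so the registration body is elided:

    const { AgentNetwork } = require("@m4trix/core");

    const network = AgentNetwork.setup(() => {
      /* register channels/agents here */
    });

    const store = network.getStore();
    console.log(store === network.getStore()); // true -- one store per network
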
@@ -3388,6 +1824,157 @@ var AgentFactory = class _AgentFactory {
3388
1824
  );
3389
1825
  }
3390
1826
  };
1827
+ var CAMEL_CASE_REGEX = /^[a-z][a-zA-Z0-9]*$/;
1828
+ var LayerName = effect.Brand.refined(
1829
+ (s) => typeof s === "string" && CAMEL_CASE_REGEX.test(s),
1830
+ (s) => effect.Brand.error(`Expected camelCase (e.g. myLayerFoo), got: ${s}`)
1831
+ );
1832
+ var SkillDependency = {
1833
+ of(config) {
1834
+ const name = LayerName(config.name);
1835
+ const decode = effect.Schema.decodeUnknown(config.shape);
1836
+ return {
1837
+ _tag: "SkillDependencyDef",
1838
+ name,
1839
+ _name: config.name,
1840
+ shape: config.shape,
1841
+ decode
1842
+ };
1843
+ }
1844
+ };
1845
+ function toLayerArray(layers) {
1846
+ if (layers.length === 1 && Array.isArray(layers[0])) {
1847
+ return layers[0];
1848
+ }
1849
+ return [...layers];
1850
+ }
1851
+ function assertUniqueLayerNames(layers) {
1852
+ const seen = /* @__PURE__ */ new Set();
1853
+ for (const dep of layers) {
1854
+ const key = dep.name;
1855
+ if (seen.has(key)) {
1856
+ throw new Error(`Duplicate layer name: ${key}`);
1857
+ }
1858
+ seen.add(key);
1859
+ }
1860
+ }
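
Layer names are branded strings: Brand.refined validates camelCase at construction time and the constructor throws on invalid input, while use() (below) rejects duplicate names eagerly via assertUniqueLayerNames. Both the dependency name and shape in this sketch are illustrative:

    const { LayerName, SkillDependency } = require("@m4trix/core");
    const { Schema } = require("effect");

    LayerName("weatherApi");    // ok: returns the branded string
    // LayerName("Weather-API") would throw:
    //   Expected camelCase (e.g. myLayerFoo), got: Weather-API

    const weatherApi = SkillDependency.of({
      name: "weatherApi",                             // validated as a LayerName
      shape: Schema.Struct({ apiKey: Schema.String }) // decoder for the layer's value
    });
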
1861
+ var Skill = class _Skill {
1862
+ constructor(params) {
1863
+ this._inputSchema = params.inputSchema;
1864
+ this._chunkSchema = params.chunkSchema;
1865
+ this._doneSchema = params.doneSchema;
1866
+ this._layers = params.layers;
1867
+ this._defineFn = params.defineFn;
1868
+ }
1869
+ getState() {
1870
+ return {
1871
+ inputSchema: this._inputSchema,
1872
+ chunkSchema: this._chunkSchema,
1873
+ doneSchema: this._doneSchema,
1874
+ layers: this._layers,
1875
+ defineFn: this._defineFn
1876
+ };
1877
+ }
1878
+ static of(_options) {
1879
+ return new _Skill({
1880
+ layers: []
1881
+ });
1882
+ }
1883
+ input(schema) {
1884
+ return new _Skill({
1885
+ ...this.getState(),
1886
+ inputSchema: schema
1887
+ });
1888
+ }
1889
+ chunk(schema) {
1890
+ return new _Skill({
1891
+ ...this.getState(),
1892
+ chunkSchema: schema
1893
+ });
1894
+ }
1895
+ done(schema) {
1896
+ return new _Skill({
1897
+ ...this.getState(),
1898
+ doneSchema: schema
1899
+ });
1900
+ }
1901
+ use(...layers) {
1902
+ const normalized = toLayerArray(layers);
1903
+ const allLayers = [...this._layers, ...normalized];
1904
+ assertUniqueLayerNames(allLayers);
1905
+ return new _Skill({
1906
+ ...this.getState(),
1907
+ layers: allLayers
1908
+ });
1909
+ }
1910
+ define(fn) {
1911
+ const state = this.getState();
1912
+ const inputSchema = state.inputSchema;
1913
+ const chunkSchema = state.chunkSchema;
1914
+ const doneSchema = state.doneSchema;
1915
+ const defineFn = fn;
1916
+ if (!inputSchema || !chunkSchema || !doneSchema || !defineFn) {
1917
+ throw new Error(
1918
+ "Skill.define requires input(), chunk(), and done() to be called before define()"
1919
+ );
1920
+ }
1921
+ const decodeInput = effect.Schema.decodeUnknown(inputSchema);
1922
+ const decodeChunk = effect.Schema.decodeUnknown(chunkSchema);
1923
+ const decodeDone = effect.Schema.decodeUnknown(doneSchema);
1924
+ const runDefine = async (input, runtime) => {
1925
+ const layersObj = runtime?.layers ?? {};
1926
+ const chunks = [];
1927
+ const emit = (chunk) => {
1928
+ const decoded = effect.Effect.runSync(
1929
+ decodeChunk(chunk)
1930
+ );
1931
+ chunks.push(decoded);
1932
+ };
1933
+ const done = await defineFn({
1934
+ input,
1935
+ emit,
1936
+ layers: layersObj
1937
+ });
1938
+ const decodedDone = effect.Effect.runSync(
1939
+ decodeDone(done)
1940
+ );
1941
+ return { chunks, done: decodedDone };
1942
+ };
1943
+ return {
1944
+ invokeStream: async function* (input, runtime) {
1945
+ const decodedInput = effect.Effect.runSync(
1946
+ decodeInput(input)
1947
+ );
1948
+ const layersObj = runtime?.layers ?? {};
1949
+ const chunks = [];
1950
+ const emit = (chunk) => {
1951
+ const decoded = effect.Effect.runSync(
1952
+ decodeChunk(chunk)
1953
+ );
1954
+ chunks.push(decoded);
1955
+ };
1956
+ const done = await defineFn({
1957
+ input: decodedInput,
1958
+ emit,
1959
+ layers: layersObj
1960
+ });
1961
+ const decodedDone = effect.Effect.runSync(
1962
+ decodeDone(done)
1963
+ );
1964
+ for (const c of chunks) {
1965
+ yield c;
1966
+ }
1967
+ yield { _tag: "Done", done: decodedDone };
1968
+ },
1969
+ invoke: async (input, runtime) => {
1970
+ const decodedInput = effect.Effect.runSync(
1971
+ decodeInput(input)
1972
+ );
1973
+ return runDefine(decodedInput, runtime);
1974
+ }
1975
+ };
1976
+ }
1977
+ };
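
The Skill builder above chains of(), input(), chunk(), done(), optionally use(), and finally define(), which returns plain invoke/invokeStream functions. A hedged usage sketch; the schemas and summarize logic are illustrative, not part of the package:

    const { Skill } = require("@m4trix/core");
    const { Schema } = require("effect");

    const summarize = Skill.of({})
      .input(Schema.Struct({ text: Schema.String }))
      .chunk(Schema.String)
      .done(Schema.Struct({ summary: Schema.String }))
      .define(async ({ input, emit }) => {
        emit("thinking...");                          // checked against chunk schema
        return { summary: input.text.slice(0, 80) };  // checked against done schema
      });

    summarize.invoke({ text: "hello world" })
      .then(({ chunks, done }) => console.log(chunks, done));

Note that the compiled invokeStream buffers every emitted chunk until defineFn resolves and only then yields them, followed by a { _tag: "Done" } terminator, so it is stream-shaped but not incremental.
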
3391
1978
 
3392
1979
  // src/matrix/io/protocols/sse.ts
3393
1980
  function formatSSE(envelope) {
@@ -3525,6 +2112,78 @@ var ExpressEndpoint = {
3525
2112
  };
3526
2113
  }
3527
2114
  };
2115
+ var randomHexString = (length) => {
2116
+ const chars = "abcdef0123456789";
2117
+ let result = "";
2118
+ for (let i = 0; i < length; i++) {
2119
+ result += chars.charAt(Math.floor(Math.random() * chars.length));
2120
+ }
2121
+ return result;
2122
+ };
2123
+ var ConsoleSpan = class {
2124
+ constructor(name, parent, context, links, startTime, kind, depth) {
2125
+ this.name = name;
2126
+ this.parent = parent;
2127
+ this.context = context;
2128
+ this.startTime = startTime;
2129
+ this.kind = kind;
2130
+ this.depth = depth;
2131
+ this._tag = "Span";
2132
+ this.sampled = true;
2133
+ this.attributes = /* @__PURE__ */ new Map();
2134
+ this.links = [];
2135
+ this.traceId = parent._tag === "Some" ? parent.value.traceId : randomHexString(32);
2136
+ this.spanId = randomHexString(16);
2137
+ this.links = Array.from(links);
2138
+ this.status = { _tag: "Started", startTime };
2139
+ }
2140
+ end(endTime, exit) {
2141
+ if (this.status._tag === "Ended")
2142
+ return;
2143
+ const startTime = this.status.startTime;
2144
+ const durationNs = endTime - startTime;
2145
+ const durationMs = Number(durationNs) / 1e6;
2146
+ const indent = " ".repeat(this.depth);
2147
+ const attrs = Object.fromEntries(this.attributes);
2148
+ const status = effect.Exit.isSuccess(exit) ? "ok" : "error";
2149
+ console.log(
2150
+ `${indent}[trace] ${this.name} ${durationMs.toFixed(2)}ms (${status})`,
2151
+ Object.keys(attrs).length > 0 ? attrs : ""
2152
+ );
2153
+ this.status = { _tag: "Ended", startTime, endTime, exit };
2154
+ }
2155
+ attribute(key, value) {
2156
+ this.attributes.set(key, value);
2157
+ }
2158
+ event(_name, _startTime, _attributes) {
2159
+ }
2160
+ addLinks(links) {
2161
+ this.links.push(...links);
2162
+ }
2163
+ };
2164
+ function getDepth(parent) {
2165
+ if (parent._tag === "None")
2166
+ return 0;
2167
+ const p = parent.value;
2168
+ if (p._tag === "ExternalSpan")
2169
+ return 0;
2170
+ return 1 + getDepth(p.parent);
2171
+ }
2172
+ var consoleTracer = effect.Tracer.make({
2173
+ span: (name, parent, context, links, startTime, kind) => new ConsoleSpan(
2174
+ name,
2175
+ parent,
2176
+ context,
2177
+ links,
2178
+ startTime,
2179
+ kind,
2180
+ getDepth(parent)
2181
+ ),
2182
+ context: (f) => f()
2183
+ });
2184
+ var consoleTracerLayer = effect.Layer.setTracer(
2185
+ consoleTracer
2186
+ );
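
ConsoleSpan logs each span when it ends, indented by nesting depth, and consoleTracerLayer installs the tracer via Layer.setTracer. Trace and span ids come from Math.random, which is fine for local console output but carries no cryptographic or W3C trace-context guarantees. A minimal sketch (durations in the output comment will vary):

    const { Effect } = require("effect");
    const { consoleTracerLayer } = require("@m4trix/core");

    const program = Effect.withSpan("agent.listen")(
      Effect.withSpan("agent.invoke")(Effect.succeed("done"))
    );

    Effect.runPromise(program.pipe(Effect.provide(consoleTracerLayer)));
    // Inner span ends first and is indented one space:
    //  [trace] agent.invoke 0.12ms (ok)
    // [trace] agent.listen 0.25ms (ok)
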
3528
2187
 
3529
2188
  Object.defineProperty(exports, 'S', {
3530
2189
  enumerable: true,
@@ -3534,29 +2193,26 @@ exports.Agent = Agent;
3534
2193
  exports.AgentFactory = AgentFactory;
3535
2194
  exports.AgentNetwork = AgentNetwork;
3536
2195
  exports.AgentNetworkEvent = AgentNetworkEvent;
3537
- exports.AiCursor = AiCursor;
3538
- exports.BaseVoiceEndpointAdapter = BaseVoiceEndpointAdapter;
3539
2196
  exports.Channel = Channel;
3540
2197
  exports.ChannelName = ChannelName;
3541
2198
  exports.ConfiguredChannel = ConfiguredChannel;
3542
- exports.Emitter = Emitter;
3543
2199
  exports.EventMetaSchema = EventMetaSchema;
3544
2200
  exports.ExposeAuthError = ExposeAuthError;
3545
2201
  exports.ExpressEndpoint = ExpressEndpoint;
3546
- exports.InputAudioController = InputAudioController;
2202
+ exports.LayerName = LayerName;
3547
2203
  exports.NextEndpoint = NextEndpoint;
3548
2204
  exports.Pump = Pump;
3549
2205
  exports.Sink = Sink;
2206
+ exports.Skill = Skill;
2207
+ exports.SkillDependency = SkillDependency;
3550
2208
  exports.SocketIoFactory = SocketIoFactory;
3551
2209
  exports.TransformMessages = TransformMessages;
3552
- exports.VoiceEndpointAdapter = VoiceEndpointAdapter;
3553
- exports.VoiceSocketAdapter = VoiceSocketAdapter;
2210
+ exports.consoleTracer = consoleTracer;
2211
+ exports.consoleTracerLayer = consoleTracerLayer;
3554
2212
  exports.ensureFullWords = ensureFullWords;
3555
2213
  exports.formatSSE = formatSSE;
3556
2214
  exports.httpStreamResponse = httpStreamResponse;
3557
2215
  exports.isHttpStreamSink = isHttpStreamSink;
3558
2216
  exports.toSSEStream = toSSEStream;
3559
- exports.useConversation = useConversation;
3560
- exports.useSocketConversation = useSocketConversation;
3561
2217
  //# sourceMappingURL=out.js.map
3562
2218
  //# sourceMappingURL=index.cjs.map