@m4trix/core 0.8.1 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,25 +1,8 @@
- import { useRef, useState, useCallback, useEffect } from 'react';
- import { io } from 'socket.io-client';
- import { css, LitElement, html } from 'lit';
- import { property, state, customElement } from 'lit/decorators.js';
- import { createRef, ref } from 'lit/directives/ref.js';
- import { createTimeline } from 'animejs';
  import { ToolMessage, AIMessage, HumanMessage } from '@langchain/core/messages';
- import { Brand, Schema, Effect, pipe, PubSub, Queue, Cause } from 'effect';
+ import { Brand, Schema, Tracer, Layer, Effect, pipe, Exit, PubSub, Queue, Cause } from 'effect';
  export { Schema as S } from 'effect';
  import { randomUUID } from 'crypto';

- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __decorateClass = (decorators, target, key, kind) => {
- var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
- for (var i = decorators.length - 1, decorator; i >= 0; i--)
- if (decorator = decorators[i])
- result = (kind ? decorator(target, key, result) : decorator(result)) || result;
- if (kind && result)
- __defProp(target, key, result);
- return result;
- };
  var __accessCheck = (obj, member, msg) => {
  if (!member.has(obj))
  throw TypeError("Cannot " + msg);
@@ -39,1608 +22,6 @@ var __privateSet = (obj, member, value, setter) => {
  return value;
  };

- // src/utility/Logger.ts
- var _Logger = class _Logger {
- constructor(namespace = "") {
- this.namespace = namespace;
- }
- static enableGlobalLogging() {
- _Logger.globalEnabled = true;
- }
- static disableGlobalLogging() {
- _Logger.globalEnabled = false;
- }
- formatPrefix() {
- return this.namespace ? `[${this.namespace}]` : "";
- }
- logIfEnabled(level, ...args) {
- if (!_Logger.globalEnabled)
- return;
- const prefix = this.formatPrefix();
- if (prefix) {
- console[level](prefix, ...args);
- } else {
- console[level](...args);
- }
- }
- log(...args) {
- this.logIfEnabled("log", ...args);
- }
- debug(...args) {
- this.logIfEnabled("debug", ...args);
- }
- info(...args) {
- this.logIfEnabled("info", ...args);
- }
- warn(...args) {
- this.logIfEnabled("warn", ...args);
- }
- error(...args) {
- this.logIfEnabled("error", ...args);
- }
- };
- _Logger.globalEnabled = false;
- var Logger = _Logger;
-
- // src/react/adapter/VoiceEndpointAdapter.ts
- var VoiceEndpointAdapter = class {
- constructor(config) {
- this.logger = new Logger("SuTr > EndpointAdapter");
- this.config = config;
- }
- };
- var BaseVoiceEndpointAdapter = class extends VoiceEndpointAdapter {
- constructor(config) {
- super(config);
- }
- /**
- * Send a voice file to the API endpoint and return a Pump stream of audio chunks
- */
- async sendVoiceFile({
- blob,
- metadata
- }) {
- const formData = new FormData();
- formData.append("audio", blob);
- if (metadata) {
- formData.append("metadata", JSON.stringify(metadata));
- }
- this.logger.debug("Sending voice file to", this.config.endpoint, formData);
- const response = await fetch(
- `${this.config.baseUrl || ""}${this.config.endpoint}`,
- {
- method: "POST",
- headers: this.config.headers,
- body: formData
- }
- );
- if (!response.ok) {
- throw new Error(`API error: ${response.status} ${await response.text()}`);
- }
- if (!response.body) {
- throw new Error("No response body");
- }
- return response;
- }
- };
-
- // src/react/utility/audio/InputAudioController.ts
- var InputAudioController = class {
- constructor() {
- this.logger = new Logger("@m4trix/core > InputAudioController");
- }
- };
-
- // src/react/utility/audio/WebAudioInputAudioController.ts
- var DEFAULT_SLICING_INTERVAL = 3e3;
- var WebAudioInputAudioController = class extends InputAudioController {
- constructor(audioConfig = {}) {
- super();
- this.audioConfig = audioConfig;
- // ─── Recording state ─────────────────────────────────────────────────────
- this.audioContextState = {
- context: null,
- source: null,
- analyser: null
- };
- this.mediaRecorder = null;
- this.recordedChunks = [];
- this.recordingStream = null;
- }
- get audioContext() {
- return this.audioContextState.context;
- }
- async createAudioContext() {
- const context = new AudioContext({
- sampleRate: this.audioConfig.sampleRate || 16e3,
- latencyHint: "interactive"
- });
- const analyser = context.createAnalyser();
- analyser.fftSize = 2048;
- return { context, source: null, analyser };
- }
- async cleanupAudioContext() {
- this.logger.debug("Cleaning up audio context");
- const { source, context } = this.audioContextState;
- if (source)
- source.disconnect();
- if (context)
- await context.close();
- this.audioContextState = { context: null, source: null, analyser: null };
- }
- async startRecording({
- onRecordedChunk,
- onError
- } = {}) {
- try {
- this.logger.debug("Starting recording");
- this.recordedChunks = [];
- const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
- this.recordingStream = stream;
- if (!this.audioContextState.context) {
- this.audioContextState = await this.createAudioContext();
- }
- this.mediaRecorder = new MediaRecorder(stream, {
- mimeType: "audio/webm;codecs=opus"
- });
- this.mediaRecorder.ondataavailable = (e) => {
- if (e.data.size > 0) {
- this.recordedChunks.push(e.data);
- onRecordedChunk?.(e.data);
- this.logger.debug("Recorded chunk", e.data.size);
- }
- };
- this.mediaRecorder.start(DEFAULT_SLICING_INTERVAL);
- this.logger.debug("MediaRecorder started");
- } catch (err) {
- const error = err instanceof Error ? err : new Error("Failed to start recording");
- this.logger.error(error);
- onError?.(error);
- }
- }
- async stopRecording({
- onRecordingCompleted
- } = {}) {
- this.logger.debug("Stopping recording");
- if (!this.mediaRecorder || this.mediaRecorder.state === "inactive")
- return;
- await new Promise((resolve) => {
- this.mediaRecorder.onstop = async () => {
- if (this.recordedChunks.length) {
- const blob = new Blob(this.recordedChunks, { type: "audio/webm" });
- onRecordingCompleted?.(blob);
- this.logger.debug("Recording completed", blob.size);
- }
- this.recordingStream?.getTracks().forEach((t) => t.stop());
- this.recordingStream = null;
- await this.cleanupAudioContext();
- resolve();
- };
- this.mediaRecorder.stop();
- });
- }
- /**
- * Cleans up all audio recording resources.
- */
- cleanup() {
- this.cleanupAudioContext();
- if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
- this.mediaRecorder.stop();
- }
- if (this.recordingStream) {
- this.recordingStream.getTracks().forEach((t) => t.stop());
- this.recordingStream = null;
- }
- }
- };
-
- // src/react/utility/audio/OutputAudioController.ts
- var OutputAudioController = class {
- constructor(loggerName) {
- this.logger = new Logger(loggerName);
- }
- };
-
- // src/react/utility/audio/AudioElementOutputAudioController.ts
- var AudioElementOutputAudioController = class extends OutputAudioController {
- constructor() {
- super("@m4trix/core > WebApiOutputAudioController");
- // ─── Playback state ──────────────────────────────────────────────────────
- this.currentHtmlAudio = null;
- this.currentAudioUrl = null;
- }
- // ─── One-shot playback ────────────────────────────────────────────────────
- /**
- * Play either a Blob or a URL string.
- * Uses <audio> under the hood for maximum browser compatibility.
- */
- async playAudio({
- source,
- onComplete
- }) {
- if (this.currentHtmlAudio) {
- this.currentHtmlAudio.pause();
- this.currentHtmlAudio.src = "";
- if (this.currentAudioUrl && source instanceof Blob) {
- URL.revokeObjectURL(this.currentAudioUrl);
- }
- }
- const audio = new Audio();
- this.currentHtmlAudio = audio;
- let url;
- if (source instanceof Blob) {
- url = URL.createObjectURL(source);
- this.currentAudioUrl = url;
- audio.onended = () => {
- URL.revokeObjectURL(url);
- onComplete?.();
- };
- } else {
- url = source;
- }
- audio.src = url;
- try {
- await audio.play();
- } catch (err) {
- this.logger.error("Playback failed, user gesture may be required", err);
- }
- }
- // ─── Streaming playback ──────────────────────────────────────────────────
- /**
- * Stream audio from a Response via MediaSource Extensions.
- * @param params.response The fetch Response whose body is an audio stream
- * @param params.mimeCodec MIME type+codec string, e.g. 'audio/mpeg'
- * @param params.onComplete Optional callback once the stream ends
- */
- async playAudioStream({
- response,
- mimeCodec = "audio/mpeg",
- onComplete
- }) {
- if (!response.ok || !response.body) {
- throw new Error(`Invalid response (${response.status})`);
- }
- if (typeof MediaSource === "undefined" || !MediaSource.isTypeSupported(mimeCodec)) {
- throw new Error(`Unsupported MIME type or codec: ${mimeCodec}`);
- }
- await this.stopPlayback();
- const mediaSource = new MediaSource();
- const url = URL.createObjectURL(mediaSource);
- this.currentAudioUrl = url;
- const audio = new Audio(url);
- this.currentHtmlAudio = audio;
- audio.autoplay = true;
- audio.onended = () => {
- URL.revokeObjectURL(url);
- this.currentAudioUrl = null;
- onComplete?.();
- };
- mediaSource.addEventListener(
- "sourceopen",
- () => {
- const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
- const reader = response.body.getReader();
- const pump = async () => {
- const { done, value } = await reader.read();
- if (done) {
- mediaSource.endOfStream();
- return;
- }
- if (value) {
- sourceBuffer.appendBuffer(value);
- }
- if (sourceBuffer.updating) {
- sourceBuffer.addEventListener("updateend", pump, { once: true });
- } else {
- pump();
- }
- };
- pump();
- },
- { once: true }
- );
- try {
- await audio.play();
- } catch (err) {
- this.logger.error(
- "Streaming playback failed, user gesture may be required",
- err
- );
- }
- }
- // ─── Chunk-based streaming playback ─────────────────────────────────────
- /**
- * Initialize a streaming audio context for chunk-based playback.
- * This creates the necessary MediaSource and SourceBuffer for subsequent chunk additions.
- * Returns functions to add chunks and end the stream, encapsulated in a closure.
- *
- * @param mimeCodec MIME type+codec string, e.g. 'audio/mpeg'
- * @param onComplete Optional callback once the stream ends
- * @returns Object containing functions to add chunks and end the stream
- */
- async initializeChunkStream({
- onComplete,
- mimeCodec = "audio/mpeg"
- }) {
- this.logger.debug(`Initializing chunk stream with codec: ${mimeCodec}`);
- if (typeof MediaSource === "undefined") {
- throw new Error("MediaSource API is not supported in this browser");
- }
- if (!MediaSource.isTypeSupported(mimeCodec)) {
- this.logger.warn(
- `Codec ${mimeCodec} not supported, falling back to standard audio/mpeg`
- );
- mimeCodec = "audio/mpeg";
- if (!MediaSource.isTypeSupported(mimeCodec)) {
- throw new Error(
- "Neither the specified codec nor the fallback codec are supported"
- );
- }
- }
- await this.stopPlayback();
- const mediaSource = new MediaSource();
- let sourceBuffer = null;
- const url = URL.createObjectURL(mediaSource);
- this.currentAudioUrl = url;
- const audio = new Audio(url);
- this.currentHtmlAudio = audio;
- audio.autoplay = false;
- audio.controls = true;
- audio.style.display = "none";
- document.body.appendChild(audio);
- let playbackStarted = false;
- let hasReceivedFirstChunk = false;
- let receivedChunksCount = 0;
- const pendingChunks = [];
- let isProcessingQueue = false;
- this.logger.debug("Waiting for MediaSource to open...");
- await new Promise((resolve, reject) => {
- const timeout = setTimeout(() => {
- reject(new Error("MediaSource failed to open (timeout)"));
- }, 5e3);
- mediaSource.addEventListener(
- "sourceopen",
- () => {
- clearTimeout(timeout);
- this.logger.debug("MediaSource open event received");
- try {
- sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
- if (mediaSource.duration === Infinity || isNaN(mediaSource.duration)) {
- mediaSource.duration = 1e3;
- }
- this.logger.debug("SourceBuffer created successfully");
- resolve();
- } catch (err) {
- reject(new Error(`Failed to create SourceBuffer: ${err}`));
- }
- },
- { once: true }
- );
- });
- const logger = this.logger;
- const processQueue = async () => {
- if (!sourceBuffer || pendingChunks.length === 0 || isProcessingQueue) {
- return;
- }
- isProcessingQueue = true;
- try {
- while (pendingChunks.length > 0) {
- if (sourceBuffer.updating) {
- await new Promise((resolve) => {
- sourceBuffer.addEventListener("updateend", () => resolve(), {
- once: true
- });
- });
- }
- const nextChunk = pendingChunks.shift();
- if (!nextChunk)
- continue;
- try {
- sourceBuffer.appendBuffer(nextChunk);
- logger.debug(
- `Processed queued chunk of size ${nextChunk.byteLength}`
- );
- if (!playbackStarted && hasReceivedFirstChunk) {
- await tryStartPlayback();
- }
- await new Promise((resolve) => {
- sourceBuffer.addEventListener("updateend", () => resolve(), {
- once: true
- });
- });
- } catch (err) {
- logger.error("Error appending queued chunk to source buffer", err);
- }
- }
- } finally {
- isProcessingQueue = false;
- }
- };
- const tryStartPlayback = async () => {
- if (playbackStarted)
- return;
- playbackStarted = true;
- logger.debug("Attempting to start audio playback...");
- if (receivedChunksCount < 3 && audio.buffered.length > 0 && audio.buffered.end(0) < 0.5) {
- logger.debug("Not enough data buffered yet, delaying playback");
- return;
- }
- try {
- if (audio.readyState === 0) {
- logger.debug(
- "Audio element not ready yet, waiting for canplay event"
- );
- await new Promise((resolve) => {
- audio.addEventListener("canplay", () => resolve(), { once: true });
- });
- }
- await audio.play();
- logger.debug("Successfully started audio playback");
- } catch (err) {
- logger.error("Failed to start playback", err);
- document.addEventListener(
- "click",
- async () => {
- try {
- await audio.play();
- logger.debug("Started playback after user interaction");
- } catch (innerErr) {
- logger.error(
- "Still failed to play after user interaction",
- innerErr
- );
- }
- },
- { once: true }
- );
- }
- };
- const addChunkToStream = async (chunk) => {
- if (!sourceBuffer) {
- throw new Error(
- "Streaming context was closed or not properly initialized."
- );
- }
- let arrayBufferChunk;
- if (chunk instanceof Blob) {
- logger.debug("Converting Blob to ArrayBuffer");
- arrayBufferChunk = await chunk.arrayBuffer();
- } else {
- arrayBufferChunk = chunk;
- }
- if (!arrayBufferChunk || arrayBufferChunk.byteLength === 0) {
- logger.warn("Received empty chunk, skipping");
- return;
- }
- if (!hasReceivedFirstChunk) {
- hasReceivedFirstChunk = true;
- logger.debug(
- `First chunk received, size: ${arrayBufferChunk.byteLength} bytes`
- );
- }
- receivedChunksCount++;
- pendingChunks.push(arrayBufferChunk);
- logger.debug(
- `Added chunk #${receivedChunksCount} to queue (size: ${arrayBufferChunk.byteLength} bytes)`
- );
- await processQueue();
- if (!playbackStarted && hasReceivedFirstChunk && receivedChunksCount >= 3) {
- await tryStartPlayback();
- }
- };
- const endChunkStream = () => {
- if (mediaSource && mediaSource.readyState === "open") {
- try {
- if (pendingChunks.length > 0 || sourceBuffer && sourceBuffer.updating) {
- logger.debug("Waiting for pending chunks before ending stream");
- setTimeout(() => endChunkStream(), 200);
- return;
- }
- if (hasReceivedFirstChunk) {
- mediaSource.endOfStream();
- logger.debug("MediaSource stream ended successfully");
- } else {
- logger.warn("Stream ended without receiving any chunks");
- }
- } catch (err) {
- logger.error("Error ending MediaSource stream", err);
- }
- }
- audio.onended = null;
- if (audio.parentNode) {
- audio.parentNode.removeChild(audio);
- }
- if (this.currentAudioUrl === url) {
- this.currentAudioUrl = null;
- URL.revokeObjectURL(url);
- }
- sourceBuffer = null;
- };
- audio.onended = () => {
- logger.debug("Audio playback completed");
- endChunkStream();
- onComplete?.();
- };
- return {
- addChunkToStream,
- endChunkStream
- };
- }
- /**
- * Stop any ongoing HTMLAudioElement playback.
- */
- async stopPlayback() {
- if (this.currentHtmlAudio) {
- try {
- this.currentHtmlAudio.pause();
- this.currentHtmlAudio.src = "";
- } catch (err) {
- this.logger.error("Error stopping playback", err);
- }
- this.currentHtmlAudio = null;
- }
- if (this.currentAudioUrl) {
- URL.revokeObjectURL(this.currentAudioUrl);
- this.currentAudioUrl = null;
- }
- }
- /**
- * Cleans up all audio playback resources.
- */
- cleanup() {
- this.stopPlayback();
- }
- };
-
- // src/react/hooks/use-conversation/useConversation.ts
- Logger.enableGlobalLogging();
- function useConversation(endpoint, {
- onStartRecording,
- onStopRecording,
- onReceive,
- autoPlay = true,
- downstreamMode = "STREAM",
- onError,
- audioConfig = {},
- requestData = {},
- endpointConfig = {}
- }) {
- const { current: logger } = useRef(
- new Logger("@m4trix/core > useConversation")
- );
- const inputAudioControllerRef = useRef(void 0);
- const outputAudioControllerRef = useRef(
- void 0
- );
- const endpointAdapterRef = useRef(
- void 0
- );
- const [voiceAgentState, setVoiceAgentState] = useState("READY");
- const [error, setError] = useState(null);
- const handleError = useCallback(
- (state2, err) => {
- setError(err);
- logger.error(`Error during ${state2}:`, err);
- onError?.(state2, err);
- },
- [onError]
- );
- const startRecording = useCallback(() => {
- if (inputAudioControllerRef.current) {
- try {
- logger.debug("Starting recording");
- setVoiceAgentState("RECORDING");
- inputAudioControllerRef.current.startRecording({
- onError: (err) => {
- handleError("RECORDING", err);
- }
- });
- onStartRecording?.();
- } catch (err) {
- if (err instanceof Error) {
- handleError("RECORDING", err);
- }
- }
- }
- }, [onStartRecording, handleError]);
- const stopRecording = useCallback(async () => {
- if (inputAudioControllerRef.current) {
- try {
- logger.debug("Stopping recording");
- await inputAudioControllerRef.current.stopRecording({
- onRecordingCompleted: async (allData) => {
- setVoiceAgentState("PROCESSING");
- try {
- const response = await endpointAdapterRef.current?.sendVoiceFile({
- blob: allData,
- metadata: requestData
- });
- if (!response) {
- throw new Error("No response received from endpoint");
- }
- setVoiceAgentState("RESPONDING");
- if (autoPlay) {
- if (downstreamMode === "STREAM") {
- await outputAudioControllerRef.current?.playAudioStream({
- response,
- onComplete: () => {
- setVoiceAgentState("READY");
- }
- });
- } else if (downstreamMode === "DOWNLOAD") {
- const responseBlob = await response.blob();
- await outputAudioControllerRef.current?.playAudio({
- source: responseBlob,
- onComplete: () => {
- setVoiceAgentState("READY");
- }
- });
- }
- } else {
- setVoiceAgentState("READY");
- }
- onReceive?.(
- allData,
- async () => {
- if (outputAudioControllerRef.current) {
- if (downstreamMode === "STREAM") {
- return outputAudioControllerRef.current.playAudioStream({
- response,
- onComplete: () => {
- setVoiceAgentState("READY");
- }
- });
- } else {
- const responseBlob = await response.blob();
- return outputAudioControllerRef.current.playAudio({
- source: responseBlob,
- onComplete: () => {
- setVoiceAgentState("READY");
- }
- });
- }
- }
- },
- async () => {
- if (outputAudioControllerRef.current) {
- return outputAudioControllerRef.current.stopPlayback();
- }
- }
- );
- } catch (err) {
- if (err instanceof Error) {
- handleError("PROCESSING", err);
- }
- setVoiceAgentState("READY");
- }
- }
- });
- onStopRecording?.();
- } catch (err) {
- if (err instanceof Error) {
- handleError("RECORDING", err);
- }
- }
- }
- }, [
- onStopRecording,
- requestData,
- autoPlay,
- downstreamMode,
- handleError,
- onReceive
- ]);
- useEffect(() => {
- if (endpointAdapterRef.current) {
- return;
- }
- try {
- const endpointAdapter = endpointConfig.endpointAdapter ? endpointConfig.endpointAdapter : new BaseVoiceEndpointAdapter({
- baseUrl: endpointConfig.baseUrl,
- endpoint,
- headers: endpointConfig.headers
- });
- endpointAdapterRef.current = endpointAdapter;
- if (!inputAudioControllerRef.current) {
- inputAudioControllerRef.current = new WebAudioInputAudioController(
- audioConfig
- );
- }
- if (!outputAudioControllerRef.current) {
- outputAudioControllerRef.current = new AudioElementOutputAudioController();
- }
- } catch (err) {
- if (err instanceof Error) {
- handleError("READY", err);
- }
- }
- }, [endpoint, endpointConfig, audioConfig, handleError]);
- useEffect(() => {
- return () => {
- inputAudioControllerRef.current?.cleanup();
- outputAudioControllerRef.current?.cleanup();
- };
- }, []);
- return {
- startRecording,
- stopRecording,
- voiceAgentState,
- error,
- audioContext: inputAudioControllerRef.current?.audioContext || null
- };
- }
-
- // src/react/adapter/socket/VoiceSocketAdapter.ts
- var VoiceSocketAdapter = class {
- constructor(config) {
- this._isConnected = false;
- this.logger = new Logger("@m4trix/core > VoiceSocketAdapter");
- this.emitter = new Emitter();
- this.config = config;
- }
- on(event, listener) {
- this.emitter.on(event, listener);
- }
- off(event, listener) {
- this.emitter.off(event, listener);
- }
- once(event, listener) {
- this.emitter.once(event, listener);
- }
- emit(event, data) {
- this.emitter.emit(event, data);
- }
- isConnected() {
- return this._isConnected;
- }
- };
- var Emitter = class {
- constructor() {
- this.target = new EventTarget();
- }
- on(type, listener) {
- this.target.addEventListener(type, listener);
- }
- off(type, listener) {
- this.target.removeEventListener(type, listener);
- }
- once(type, listener) {
- const wrapper = (event) => {
- this.off(type, wrapper);
- listener(event.detail);
- };
- this.on(type, wrapper);
- }
- emit(type, detail) {
- this.target.dispatchEvent(new CustomEvent(type, { detail }));
- }
- };
- var VoiceSocketIOAdapter = class extends VoiceSocketAdapter {
- constructor(config) {
- super(config);
- this.socket = null;
- }
- async connect() {
- return new Promise((resolve, reject) => {
- if (!this.socket) {
- this.socket = io(this.config.baseUrl, {
- extraHeaders: this.config.headers,
- autoConnect: true
- });
- }
- this.socket.on("connect", () => {
- this._isConnected = true;
- this.logger.debug("Connected to socket");
- this.emit("connect");
- resolve();
- });
- this.socket.on("disconnect", () => {
- this._isConnected = false;
- this.emit("disconnect");
- this.logger.debug("Disconnected from socket");
- if (this.config.autoReconnect)
- this.connect();
- });
- this.socket.on("connect_error", (error) => {
- this.logger.error("Error connecting to socket", error);
- this.emit("error", error);
- reject(error);
- });
- this.socket.on("voice:chunk_received", (chunk) => {
- this.logger.debug("Received voice chunk", chunk.byteLength);
- this.onVoiceChunkReceived(chunk);
- });
- this.socket.on("voice:received_end_of_response_stream", () => {
- this.logger.debug("Received end of response stream");
- this.onReceivedEndOfResponseStream();
- });
- this.socket.on("voice:file_received", (blob) => {
- this.logger.debug("Received voice file");
- this.onVoiceFileReceived(blob);
- });
- this.socket.on("control-message", (message) => {
- this.logger.debug("Received control message", message);
- this.emit("control-message", message);
- });
- });
- }
- disconnect() {
- this.socket?.disconnect();
- this.socket = null;
- this._isConnected = false;
- }
- exposeSocket() {
- return this.socket;
- }
- async sendVoiceChunk(chunk, metadata) {
- this.logger.debug(
- "Sending voice chunk %i",
- chunk instanceof Blob ? chunk.size : chunk.byteLength
- );
- if (!this.socket || !this.isConnected)
- throw new Error("Socket not connected");
- let chunkToSend;
- if (chunk instanceof Blob) {
- chunkToSend = await chunk.arrayBuffer();
- } else {
- chunkToSend = chunk;
- }
- this.logger.debug("[Socket] Sending voice chunk", chunkToSend.byteLength);
- this.socket.emit("voice:send_chunk", chunkToSend, metadata);
- this.emit("chunk_sent", chunk);
- }
- sendVoiceFile(blob, metadata) {
- this.logger.debug("Sending voice file", blob, metadata);
- if (!this.socket || !this.isConnected)
- throw new Error("Socket not connected");
- this.socket.emit("voice:send_file", blob, metadata);
- this.emit("file-sent", blob);
- }
- commitVoiceMessage() {
- if (!this.socket || !this.isConnected)
- throw new Error("Socket not connected");
- this.socket.emit("voice:commit");
- }
- onVoiceChunkReceived(chunk) {
- this.emit("chunk-received", chunk);
- }
- onVoiceFileReceived(blob) {
- this.emit("file-received", blob);
- }
- onReceivedEndOfResponseStream() {
- this.emit("received-end-of-response-stream");
- }
- };
-
- // src/react/utility/audio/WebAudioOutputAudioController.ts
- var STREAM_SAMPLE_RATE = 24e3;
- var CHANNELS = 1;
- var SLICE_DURATION_S = 0.25;
- var FRAMES_PER_SLICE = Math.floor(STREAM_SAMPLE_RATE * SLICE_DURATION_S);
- var BYTES_PER_SLICE = FRAMES_PER_SLICE * 2;
- var SCHED_TOLERANCE = 0.05;
- var WebAudioOutputAudioController = class extends OutputAudioController {
- constructor() {
- super("@m4trix/core > WebAudioOutputAudioController");
- this.audioCtx = new AudioContext();
- this.gain = this.audioCtx.createGain();
- this.nextPlayTime = 0;
- this.activeSources = /* @__PURE__ */ new Set();
- this.userGestureHookAttached = false;
- this.gain.connect(this.audioCtx.destination);
- this.resetScheduler();
- }
- // ─────────────────────────────────────────────────────────────────────
- // One‑shot playback
- // ─────────────────────────────────────────────────────────────────────
- async playAudio({
- source,
- onComplete
- }) {
- await this.stopPlayback();
- const buf = await this.sourceToArrayBuffer(source);
- const decoded = await this.decode(buf);
- await this.ensureContextRunning();
- const src = this.createSource(decoded, this.audioCtx.currentTime);
- src.onended = () => {
- this.activeSources.delete(src);
- onComplete?.();
- };
- }
- async playAudioStream() {
- }
- // ─────────────────────────────────────────────────────────────────────
- // PCM streaming
- // ─────────────────────────────────────────────────────────────────────
- async initializeChunkStream({
- onComplete
- }) {
- await this.stopPlayback();
- await this.ensureContextRunning();
- this.resetScheduler();
- let streamEnded = false;
- let pending = new Uint8Array(0);
- const addChunkToStream = async (pkt) => {
- if (streamEnded) {
- this.logger.warn("Attempt to add chunk after stream ended \u2013 ignoring.");
- return;
- }
- const bytes = new Uint8Array(
- pkt instanceof Blob ? await pkt.arrayBuffer() : pkt
- );
- if (bytes.length === 0)
- return;
- const merged = new Uint8Array(pending.length + bytes.length);
- merged.set(pending);
- merged.set(bytes, pending.length);
- pending = merged;
- if (pending.length % 2 === 1)
- return;
- while (pending.length >= BYTES_PER_SLICE) {
- const sliceBytes = pending.slice(0, BYTES_PER_SLICE);
- pending = pending.slice(BYTES_PER_SLICE);
- const aligned = sliceBytes.buffer.slice(
- sliceBytes.byteOffset,
- sliceBytes.byteOffset + sliceBytes.byteLength
- );
- const int16 = new Int16Array(aligned);
- const buf = this.audioCtx.createBuffer(
- CHANNELS,
- int16.length,
- STREAM_SAMPLE_RATE
- );
- const data = buf.getChannelData(0);
- for (let i = 0; i < int16.length; i++)
- data[i] = int16[i] / 32768;
- this.scheduleBuffer(buf);
- }
- };
- const endChunkStream = () => {
- if (streamEnded)
- return;
- streamEnded = true;
- if (onComplete) {
- if (this.activeSources.size === 0)
- onComplete();
- else {
- const last = Array.from(this.activeSources).pop();
- if (last) {
- const prev = last.onended;
- last.onended = (e) => {
- if (prev)
- prev.call(last, e);
- onComplete();
- };
- }
- }
- }
- };
- return { addChunkToStream, endChunkStream };
- }
- // ─────────────────────────────────────────────────────────────────────
- // Buffer scheduling helpers
- // ─────────────────────────────────────────────────────────────────────
- scheduleBuffer(buf) {
- if (this.nextPlayTime < this.audioCtx.currentTime + SCHED_TOLERANCE) {
- this.nextPlayTime = this.audioCtx.currentTime + SCHED_TOLERANCE;
- }
- this.createSource(buf, this.nextPlayTime);
- this.nextPlayTime += buf.duration;
- }
- createSource(buf, when) {
- const src = this.audioCtx.createBufferSource();
- src.buffer = buf;
- src.connect(this.gain);
- src.start(when);
- this.activeSources.add(src);
- src.onended = () => {
- this.activeSources.delete(src);
- };
- return src;
- }
- resetScheduler() {
- this.nextPlayTime = this.audioCtx.currentTime;
- }
- // ─── External resource helpers ───────────────────────────────────────
- sourceToArrayBuffer(src) {
- return typeof src === "string" ? fetch(src).then((r) => {
- if (!r.ok)
- throw new Error(`${r.status}`);
- return r.arrayBuffer();
- }) : src.arrayBuffer();
- }
- decode(buf) {
- return new Promise(
- (res, rej) => this.audioCtx.decodeAudioData(buf, res, rej)
- );
- }
- // ─── Lifecycle methods ───────────────────────────────────────────────
- async stopPlayback() {
- for (const src of this.activeSources) {
- try {
- src.stop();
- } catch {
- }
- src.disconnect();
- }
- this.activeSources.clear();
- this.resetScheduler();
- }
- cleanup() {
- this.stopPlayback();
- if (this.audioCtx.state !== "closed")
- this.audioCtx.close();
- }
- // ─── Autoplay‑policy helper ──────────────────────────────────────────
- async ensureContextRunning() {
- if (this.audioCtx.state !== "suspended")
- return;
- try {
- await this.audioCtx.resume();
- } catch {
- }
- if (this.audioCtx.state === "running")
- return;
- if (!this.userGestureHookAttached) {
- this.userGestureHookAttached = true;
- const resume = async () => {
- try {
- await this.audioCtx.resume();
- } catch {
- }
- if (this.audioCtx.state === "running")
- document.removeEventListener("click", resume);
- };
- document.addEventListener("click", resume);
- }
- }
- };
-
- // src/react/hooks/use-conversation/useSocketConversation.ts
- Logger.enableGlobalLogging();
- function useSocketConversation({
- scope,
- onStartRecording,
- onStopRecording,
- onReceive,
- upstreamMode = "STREAM_WHILE_TALK",
- onError,
- audioConfig = {},
- socketConfig = {}
- }) {
- const { current: logger } = useRef(
- new Logger("SuTr > useSocketConversation")
- );
- const inputAudioControllerRef = useRef(void 0);
- const outputAudioControllerRef = useRef(
- void 0
- );
- const socketAdapterRef = useRef(void 0);
- const [socket, setSocket] = useState(null);
- const [voiceAgentState, setVoiceAgentState] = useState("READY");
- const [error, setError] = useState(null);
- const shouldStreamWhileTalk = upstreamMode === "STREAM_WHILE_TALK";
- const handleError = useCallback(
- (state2, err) => {
- setError(err);
- logger.error(`Error during ${state2}:`, err);
- onError?.(state2, err);
- },
- [onError]
- );
- const subscribeToSocketEventsForChunkDownstreaming = useCallback(
- async (socketAdapter) => {
- logger.debug("Setting up audio stream for receiving chunks");
- try {
- const { addChunkToStream, endChunkStream } = await outputAudioControllerRef.current.initializeChunkStream({
- mimeCodec: "audio/mpeg",
- onComplete: () => {
- logger.debug("Audio stream playback completed");
- setVoiceAgentState("READY");
- }
- });
- let chunkCount = 0;
- const chunkReceivedEmitter = async (chunk) => {
- if (chunk instanceof ArrayBuffer) {
- chunkCount++;
- logger.debug(
- `Received voice chunk #${chunkCount} from socket, size: ${chunk.byteLength} bytes`
- );
- if (!chunk || chunk.byteLength === 0) {
- logger.warn("Received empty chunk, skipping");
- return;
- }
- try {
- await addChunkToStream(chunk);
- logger.debug(
- `Successfully added chunk #${chunkCount} to audio stream`
- );
- } catch (err) {
- logger.error(
- `Failed to add chunk #${chunkCount} to audio stream`,
- err
- );
- if (err instanceof Error) {
- handleError("DOWNSTREAMING", err);
- }
- }
- }
- };
- socketAdapter.on("chunk-received", chunkReceivedEmitter);
- const endOfStreamEmitter = () => {
- logger.debug(
- `Received end of stream signal after ${chunkCount} chunks, ending chunk stream`
- );
- endChunkStream();
- setVoiceAgentState("READY");
- };
- socketAdapter.on("received-end-of-response-stream", endOfStreamEmitter);
- return () => {
- logger.debug("Cleaning up socket event listeners");
- socketAdapter.off("chunk-received", chunkReceivedEmitter);
- socketAdapter.off(
- "received-end-of-response-stream",
- endOfStreamEmitter
- );
- endChunkStream();
- };
- } catch (err) {
- if (err instanceof Error) {
- handleError("DOWNSTREAMING", err);
- }
- return () => {
- };
- }
- },
- [handleError]
- );
- const hookupSocketAdapter = useCallback(
- async (socketAdapter) => {
- logger.debug("Connecting to socket...");
- try {
- await socketAdapter.connect();
- socketAdapter.on("connect", () => {
- logger.debug("Socket adapter connected");
- setVoiceAgentState("READY");
- });
- socketAdapter.on("disconnect", () => {
- logger.debug("Socket adapter disconnected");
- });
- socketAdapter.on("error", (err) => {
- if (err instanceof Error) {
- handleError(voiceAgentState, err);
- } else {
- handleError(voiceAgentState, new Error("Unknown error"));
- }
- });
- setSocket(socketAdapter.exposeSocket());
- } catch (err) {
- if (err instanceof Error) {
- handleError("READY", err);
- }
- }
- },
- [handleError, voiceAgentState]
- );
- const startRecording = useCallback(() => {
- if (inputAudioControllerRef.current) {
- try {
- logger.debug("Starting recording");
- setVoiceAgentState("RECORDING");
- inputAudioControllerRef.current.startRecording({
- onRecordedChunk: async (chunk) => {
- if (shouldStreamWhileTalk) {
- try {
- await socketAdapterRef.current?.sendVoiceChunk(chunk);
- } catch (err) {
- if (err instanceof Error) {
- handleError("RECORDING", err);
- }
- }
- }
- }
- });
- onStartRecording?.();
- } catch (err) {
- if (err instanceof Error) {
- handleError("RECORDING", err);
- }
- }
- }
- }, [onStartRecording, shouldStreamWhileTalk, handleError]);
- const stopRecording = useCallback(async () => {
- if (inputAudioControllerRef.current) {
- try {
- logger.debug("Stopping recording");
- await inputAudioControllerRef.current.stopRecording({
- onRecordingCompleted: async (allData) => {
- setVoiceAgentState("PROCESSING");
- try {
- if (shouldStreamWhileTalk) {
- logger.debug("Committing voice message");
- await socketAdapterRef.current?.commitVoiceMessage();
- } else {
- await socketAdapterRef.current?.sendVoiceFile(allData);
- }
- setVoiceAgentState("DOWNSTREAMING");
- await subscribeToSocketEventsForChunkDownstreaming(
- socketAdapterRef.current
- );
- onReceive?.(
- allData,
- async () => {
- if (outputAudioControllerRef.current) {
- return outputAudioControllerRef.current.stopPlayback();
- }
- },
- async () => {
- if (outputAudioControllerRef.current) {
- return outputAudioControllerRef.current.stopPlayback();
- }
- }
- );
- } catch (err) {
- if (err instanceof Error) {
- handleError("PROCESSING", err);
- }
- }
- }
- });
- onStopRecording?.();
- } catch (err) {
- if (err instanceof Error) {
- handleError("RECORDING", err);
- }
- }
- }
- }, [
- onStopRecording,
- handleError,
- subscribeToSocketEventsForChunkDownstreaming,
- onReceive
- ]);
- useEffect(() => {
- if (socketAdapterRef.current) {
- return;
- }
- try {
- const socketAdapter = socketConfig.socketAdapter ? socketConfig.socketAdapter : new VoiceSocketIOAdapter({
- scope,
- baseUrl: socketConfig.baseUrl || "",
- headers: socketConfig.headers
- });
- socketAdapterRef.current = socketAdapter;
- if (!socketAdapter.isConnected()) {
- hookupSocketAdapter(socketAdapter);
- }
- if (!inputAudioControllerRef.current) {
- inputAudioControllerRef.current = new WebAudioInputAudioController(
- audioConfig
- );
- }
- if (!outputAudioControllerRef.current) {
- outputAudioControllerRef.current = new WebAudioOutputAudioController();
- }
- } catch (err) {
- if (err instanceof Error) {
- handleError("READY", err);
- }
- }
- }, [scope, socketConfig, hookupSocketAdapter, audioConfig, handleError]);
- useEffect(() => {
- return () => {
- inputAudioControllerRef.current?.cleanup();
- outputAudioControllerRef.current?.cleanup();
- if (socketAdapterRef.current) {
- socketAdapterRef.current.disconnect();
- socketAdapterRef.current = void 0;
- }
- };
- }, []);
- return {
- startRecording,
- stopRecording,
- voiceAgentState,
- error,
- audioContext: inputAudioControllerRef.current?.audioContext || null,
- socket
- };
- }
- var AiCursorComponentStyle = css`
- :host {
- --ai-local-cursor-size: var(--sk-ai-cursor-size, 1rem);
- --ai-local-cursor-label-padding: var(
- --sk-ai-cursor-label-padding,
- 0.25rem 0.25rem
- );
- --ai-local-cursor-border-radius: var(--sk-ai-cursor-border-radius, 0.25rem);
- --ai-local-label-offset: var(--sk-ai-cursor-label-offset, 1rem);
-
- --ai-local-label-font-size: var(--sk-ai-cursor-label-font-size, 12px);
- --ai-local-label-font-weight: var(--sk-ai-cursor-label-font-weight, bold);
- --ai-local-label-color: var(--sk-ai-cursor-label-color, white);
- --ai-local-label-background-color: var(
- --sk-ai-cursor-label-background-color,
- black
- );
- --ai-local-label-border-color: var(
- --sk-ai-cursor-label-border-color,
- white
- );
- --ai-local-label-border-width: var(
- --sk-ai-cursor-label-border-width,
- 0.1rem
- );
-
- color: black;
- stroke: white;
- position: absolute;
- /* Insetting in the parent element (body) */
- top: 0;
- left: 0;
- bottom: 0;
- right: 0;
- pointer-events: none;
- width: var(--ai-local-cursor-size);
- height: var(--ai-local-cursor-size);
- }
-
- #cursor-graphic-parent {
- position: absolute;
- top: 0;
- left: 0;
- }
-
- #label-text {
- position: absolute;
- color: white;
- font-size: 12px;
- font-weight: bold;
- padding: var(--ai-local-cursor-label-padding);
- border-radius: var(--ai-local-cursor-border-radius);
-
- white-space: nowrap;
- overflow: hidden;
- text-overflow: ellipsis;
-
- width: fit-content;
- min-width: fit-content;
- top: var(--ai-local-label-offset);
- left: var(--ai-local-label-offset);
-
- border: var(--ai-local-label-border-width) solid
- var(--ai-local-label-border-color);
- background-color: var(--ai-local-label-background-color);
- color: var(--ai-local-label-color);
- font-size: var(--ai-local-label-font-size);
- font-weight: var(--ai-local-label-font-weight);
- }
- `;
-
- // src/ui/ai-cursor/rendering/AiCursorComponent.ts
- var AiCursorComponent = class extends LitElement {
- constructor() {
- super();
- this.eventHooks = {
- defineSetPosition: () => {
- },
- defineAddPositionToQueue: () => {
- },
- definePlayQueue: () => {
- },
- defineSetShowCursor: () => {
- }
- };
- this.isShowingCursor = true;
- this.labelText = "AI Cursor";
- this.cursorPosition = [0, 0];
- this._cursorRef = createRef();
- this._labelRef = createRef();
- }
- updated(_changedProperties) {
- if (_changedProperties.has("_cursorRef")) {
- if (this._cursorRef.value) {
- this.hookUpCallbacks();
- } else {
- this._timeline?.pause();
- this._timeline?.refresh();
- }
- }
- super.updated(_changedProperties);
- }
- render() {
- const cursorSvg = html`
- <svg
- width=${24}
- height=${24}
- viewBox="0 0 100 100"
- fill="none"
- xmlns="http://www.w3.org/2000/svg"
- >
- <g clip-path="url(#clip0_3576_285)">
- <path
- class="cursor-path"
- d="M2.14849 7.04749C1.35153 4.07321 4.07319 1.35155 7.04747 2.14851L77.3148 20.9766C80.2891 21.7735 81.2853 25.4914 79.108 27.6687L27.6687 79.108C25.4914 81.2853 21.7735 80.2891 20.9766 77.3149L2.14849 7.04749Z"
- fill="currentColor"
- />
- </g>
- <defs>
- <clipPath id="clip0_3576_285">
- <rect width="100" height="100" fill="white" />
- </clipPath>
- </defs>
- </svg>
- `;
- return html`
- <span
- id="cursor-graphic-parent"
- ${ref(this._cursorRef)}
- ?hidden=${!this.isShowingCursor}
- >
- ${cursorSvg}
- <span
- ${ref(this._labelRef)}
- id="label-text"
- ?hidden=${!this.isShowingCursor}
- >${this.labelText}</span
- >
- </span>
- `;
- }
- // private methods
- /**
- * The primary way to control the cursor is using an external API.
- * This interface exposes controlling methods. The Lit Component itself is
- * intended to be a controlled component.
- */
- hookUpCallbacks() {
- const animationTarget = this._cursorRef.value;
- if (!animationTarget) {
- return;
- }
- this._timeline = createTimeline({ defaults: { duration: 750 } });
- if (!this._timeline) {
- return;
- }
- this.eventHooks.defineSetPosition((position) => {
- this._timeline?.add(animationTarget, {
- translateX: position[0],
- translateY: position[1],
- duration: 1
- });
- this._timeline?.play();
- });
- this.eventHooks.defineAddPositionToQueue((position) => {
- this._timeline?.add(animationTarget, {
- translateX: position[0],
- translateY: position[1],
- duration: 1e3
- });
- });
- this.eventHooks.defineSetShowCursor((show) => {
- this.isShowingCursor = show;
- });
- this.eventHooks.definePlayQueue(() => {
- this._timeline?.play();
- });
- }
- // Getters
- get cursorRef() {
- return this._cursorRef.value;
- }
- get labelRef() {
- return this._labelRef.value;
- }
- };
- // Define scoped styles right with your component, in plain CSS
- AiCursorComponent.styles = AiCursorComponentStyle;
- __decorateClass([
- property({
- type: Object
- })
- ], AiCursorComponent.prototype, "eventHooks", 2);
- __decorateClass([
- property({ type: Boolean })
- ], AiCursorComponent.prototype, "isShowingCursor", 2);
- __decorateClass([
- property({ type: String })
- ], AiCursorComponent.prototype, "labelText", 2);
- __decorateClass([
- property({ type: Array })
- ], AiCursorComponent.prototype, "cursorPosition", 2);
- __decorateClass([
- state()
- ], AiCursorComponent.prototype, "_cursorRef", 2);
- __decorateClass([
- state()
- ], AiCursorComponent.prototype, "_labelRef", 2);
- AiCursorComponent = __decorateClass([
- customElement("ai-cursor")
- ], AiCursorComponent);
-
- // src/ui/ai-cursor/rendering/index.ts
- var mountAiCursor = (aiCursorProps) => {
- const root = document.body;
- const cursor = document.createElement("ai-cursor");
- cursor.eventHooks = aiCursorProps.eventHooks;
- root.appendChild(cursor);
- };
-
- // src/ui/ai-cursor/AiCursor.ts
- var AiCursor = class _AiCursor {
- constructor() {
- }
- // Static constructors
- static spawn() {
- const newCursor = new _AiCursor();
- newCursor.mount();
- return newCursor;
- }
- jumpTo(target) {
- const position = targetToPosition(target);
- if (position) {
- this.setPosition?.(position);
- }
- }
- moveTo(target) {
- const position = targetToPosition(target);
- if (position) {
- this.addPositionToQueue?.(position);
- this.playQueue?.();
- }
- }
- scheduleMoves(targets) {
- targets.forEach((target) => {
- const position = targetToPosition(target);
- if (position) {
- this.addPositionToQueue?.(position);
- }
- });
- this.playQueue?.();
- }
- show() {
- this.setShowCursor?.(true);
- }
- hide() {
- this.setShowCursor?.(false);
- }
- mount() {
- mountAiCursor({
- eventHooks: {
- defineSetPosition: (callback) => {
- this.setPosition = callback;
- },
- defineAddPositionToQueue: (callback) => {
- this.addPositionToQueue = callback;
- },
- definePlayQueue: (callback) => {
- this.playQueue = callback;
- },
- defineSetShowCursor: (callback) => {
- this.setShowCursor = callback;
- }
- }
- });
- }
- };
- function calculateClickPositionFromElement(element) {
- const rect = element.getBoundingClientRect();
- return [rect.left + rect.width / 2, rect.top + rect.height / 2];
- }
- function targetToPosition(target) {
- if (Array.isArray(target) && target.length === 2 && typeof target[0] === "number" && typeof target[1] === "number") {
- return target;
- } else if (target instanceof HTMLElement) {
- return calculateClickPositionFromElement(target);
- } else if (typeof target === "string") {
- const element = document.querySelector(target);
- if (element) {
- return calculateClickPositionFromElement(element);
- }
- }
- return void 0;
- }
-
  // src/stream/Pump.ts
  var Pump = class _Pump {
  constructor(src) {
@@ -2748,8 +1129,66 @@ var Channel = {
  };
  }
  };
+
+ // src/helper/types/noop.ts
+ var asyncNoop = async () => {
+ };
+
+ // src/matrix/agent-network/stores/inmemory-network-store.ts
+ var createInMemoryNetworkStore = () => {
+ const store = /* @__PURE__ */ new Map();
+ return {
+ storeEvent: (contextId, runId, event) => {
+ let byRun = store.get(contextId);
+ if (!byRun) {
+ byRun = /* @__PURE__ */ new Map();
+ store.set(contextId, byRun);
+ }
+ let events = byRun.get(runId);
+ if (!events) {
+ events = [];
+ byRun.set(runId, events);
+ }
+ events.push(event);
+ },
+ getEvents: (contextId, runId) => {
+ const events = store.get(contextId)?.get(runId);
+ return events ? [...events] : [];
+ },
+ getContextEvents: (contextId) => {
+ const byRun = store.get(contextId);
+ const result = /* @__PURE__ */ new Map();
+ if (byRun) {
+ for (const [runId, events] of byRun) {
+ result.set(runId, [...events]);
+ }
+ }
+ return result;
+ },
+ getFullStore: () => {
+ const result = /* @__PURE__ */ new Map();
+ for (const [contextId, byRun] of store) {
+ const contextMap = /* @__PURE__ */ new Map();
+ for (const [runId, events] of byRun) {
+ contextMap.set(runId, [...events]);
+ }
+ result.set(contextId, contextMap);
+ }
+ return result;
+ },
+ persist: () => asyncNoop(),
+ load: () => asyncNoop()
+ };
+ };
+
+ // src/matrix/agent-network/event-plane.ts
  var DEFAULT_CAPACITY = 16;
- var createEventPlane = (network, capacity = DEFAULT_CAPACITY) => Effect.gen(function* () {
+ var createEventPlane = (options) => Effect.gen(function* () {
+ const {
+ network,
+ capacity = DEFAULT_CAPACITY,
+ store = createInMemoryNetworkStore()
+ } = options;
  const channels = network.getChannels();
  const pubsubs = /* @__PURE__ */ new Map();
  for (const channel of channels.values()) {
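
The hunk above makes the event plane's storage pluggable: `createEventPlane` now takes an options object and defaults `store` to `createInMemoryNetworkStore()`. A minimal TypeScript sketch of the store contract implied by the in-memory implementation; the method names are taken verbatim from the hunk, while the `Envelope` type is an editorial simplification of the `meta` fields used in later hunks, not a published typing:

```ts
// Simplified envelope shape, inferred from envelope.meta usage in this diff.
type Envelope = {
  name: string;
  payload: unknown;
  meta: { contextId: string; runId: string };
};

// Contract implied by createInMemoryNetworkStore(): events are bucketed by
// contextId, then runId; getters return defensive copies; persist/load are
// async no-ops that a durable store could override.
interface NetworkStore {
  storeEvent(contextId: string, runId: string, event: Envelope): void;
  getEvents(contextId: string, runId: string): Envelope[];
  getContextEvents(contextId: string): Map<string, Envelope[]>;
  getFullStore(): Map<string, Map<string, Envelope[]>>;
  persist(): Promise<void>;
  load(): Promise<void>;
}
```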
@@ -2762,12 +1201,59 @@ var createEventPlane = (network, capacity = DEFAULT_CAPACITY) => Effect.gen(func
  throw new Error(`Channel not found: ${channel}`);
  return p;
  };
- const publish = (channel, envelope) => PubSub.publish(getPubsub(channel), envelope);
- const publishToChannels = (targetChannels, envelope) => Effect.all(
- targetChannels.map((c) => publish(c.name, envelope)),
- { concurrency: "unbounded" }
- ).pipe(Effect.map((results) => results.every(Boolean)));
+ const recordEvent = (envelope) => {
+ const { contextId, runId } = envelope.meta;
+ store.storeEvent(contextId, runId, envelope);
+ };
+ const publishToPubSub = (channel, envelope) => PubSub.publish(getPubsub(channel), envelope);
+ const publish = (channel, envelope) => Effect.sync(() => recordEvent(envelope)).pipe(
+ Effect.flatMap(() => publishToPubSub(channel, envelope)),
+ Effect.withSpan("event.publish", {
+ attributes: {
+ "event.name": envelope.name,
+ "event.payload": payloadForSpan(envelope.payload),
+ channel,
+ runId: envelope.meta.runId,
+ contextId: envelope.meta.contextId
+ }
+ })
+ );
+ const publishToChannels = (targetChannels, envelope) => Effect.sync(() => recordEvent(envelope)).pipe(
+ Effect.flatMap(
+ () => Effect.all(
+ targetChannels.map((c) => publishToPubSub(c.name, envelope)),
+ { concurrency: "unbounded" }
+ )
+ ),
+ Effect.map((results) => results.every(Boolean)),
+ Effect.withSpan("event.publish", {
+ attributes: {
+ "event.name": envelope.name,
+ "event.payload": payloadForSpan(envelope.payload),
+ runId: envelope.meta.runId,
+ contextId: envelope.meta.contextId
+ }
+ })
+ );
  const subscribe = (channel) => PubSub.subscribe(getPubsub(channel));
+ const getRunEvents = (runId, contextId) => {
+ return store.getEvents(contextId, runId).slice();
+ };
+ const getContextEvents = (contextId) => {
+ const byRun = store.getContextEvents(contextId);
+ const map = /* @__PURE__ */ new Map();
+ const all = [];
+ for (const [runId, events] of byRun) {
+ const readonlyEvents = events.slice();
+ map.set(runId, readonlyEvents);
+ all.push(...readonlyEvents);
+ }
+ return {
+ all,
+ byRun: (runId) => map.get(runId) ?? [],
+ map
+ };
+ };
  const shutdown = Effect.all([...pubsubs.values()].map(PubSub.shutdown), {
  concurrency: "unbounded"
  }).pipe(Effect.asVoid);
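
With this hunk, every publish records the envelope to the store before it hits the PubSub, wrapped in an `event.publish` tracing span, and the two new accessors read that history back. A hedged usage sketch; the `plane` shape is inferred from the returned object in this hunk, and the IDs are hypothetical:

```ts
type Envelope = {
  name: string;
  payload: unknown;
  meta: { contextId: string; runId: string };
};

// Shape inferred from the object returned by createEventPlane in this diff.
declare const plane: {
  getRunEvents(runId: string, contextId: string): Envelope[];
  getContextEvents(contextId: string): {
    all: Envelope[];
    byRun(runId: string): Envelope[];
    map: Map<string, Envelope[]>;
  };
};

const runHistory = plane.getRunEvents("run-1", "ctx-1"); // one run's events
const ctx = plane.getContextEvents("ctx-1");             // whole context
console.log(runHistory.length, ctx.all.length, ctx.byRun("run-1").length);
```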
@@ -2775,42 +1261,77 @@ var createEventPlane = (network, capacity = DEFAULT_CAPACITY) => Effect.gen(func
2775
1261
  publish,
2776
1262
  publishToChannels,
2777
1263
  subscribe,
1264
+ getRunEvents,
1265
+ getContextEvents,
2778
1266
  shutdown
2779
1267
  };
2780
1268
  });
2781
- var runSubscriber = (agent, publishesTo, dequeue, plane, emitQueue) => Effect.gen(function* () {
1269
+ function payloadForSpan(payload, maxLen = 500) {
1270
+ try {
1271
+ const s = JSON.stringify(payload);
1272
+ return s.length > maxLen ? `${s.slice(0, maxLen)}...` : s;
1273
+ } catch {
1274
+ return String(payload);
1275
+ }
1276
+ }
+ var runSubscriber = (agent, publishesTo, dequeue, plane, emitQueue, channelName) => Effect.gen(function* () {
  const listensTo = agent.getListensTo?.() ?? [];
+ const agentId = agent.getId();
  const processOne = () => Effect.gen(function* () {
  const envelope = yield* Queue.take(dequeue);
  if (listensTo.length > 0 && !listensTo.includes(envelope.name)) {
  return;
  }
- yield* Effect.tryPromise({
-   try: () => agent.invoke({
-     triggerEvent: envelope,
-     emit: (userEvent) => {
-       const fullEnvelope = {
-         name: userEvent.name,
-         meta: envelope.meta,
-         payload: userEvent.payload
-       };
-       if (emitQueue) {
-         Effect.runPromise(
-           Queue.offer(emitQueue, {
-             channels: publishesTo,
-             envelope: fullEnvelope
-           })
-         ).catch(() => {
-         });
-       } else {
-         Effect.runFork(
-           plane.publishToChannels(publishesTo, fullEnvelope)
-         );
-       }
+ const runEvents = plane.getRunEvents(
+   envelope.meta.runId,
+   envelope.meta.contextId
+ );
+ const contextEvents = plane.getContextEvents(envelope.meta.contextId);
+ yield* Effect.withSpan("agent.listen", {
+   attributes: {
+     agentId,
+     "event.name": envelope.name,
+     "event.payload": payloadForSpan(envelope.payload),
+     ...channelName !== void 0 && { channel: channelName }
+   }
+ })(
+   Effect.withSpan("agent.invoke", {
+     attributes: {
+       agentId,
+       "event.name": envelope.name,
+       "event.payload": payloadForSpan(envelope.payload)
  }
-   }),
-   catch: (e) => e
- });
+   })(
+     Effect.tryPromise({
+       try: () => agent.invoke({
+         triggerEvent: envelope,
+         emit: (userEvent) => {
+           const fullEnvelope = {
+             name: userEvent.name,
+             meta: envelope.meta,
+             payload: userEvent.payload
+           };
+           if (emitQueue) {
+             Effect.runPromise(
+               Queue.offer(emitQueue, {
+                 channels: publishesTo,
+                 envelope: fullEnvelope
+               })
+             ).catch(() => {
+             });
+           } else {
+             Effect.runFork(
+               plane.publishToChannels(publishesTo, fullEnvelope)
+             );
+           }
+         },
+         runEvents,
+         contextEvents
+       }),
+       catch: (e) => e
+     })
+   )
+ );
  }).pipe(
    Effect.catchAllCause(
      (cause) => Cause.isInterrupted(cause) ? Effect.void : Effect.sync(() => {
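// ─── Example (editor's sketch, not part of the diff) ───
// The logic callback an Agent wraps now receives the recorded history next to
// the trigger; the shapes follow this hunk, the event names are made up.
const logic = async ({ triggerEvent, emit, runEvents, contextEvents }) => {
  const priorNames = runEvents.map((e) => e.name); // this run, oldest first
  const runIds = [...contextEvents.map.keys()];    // every run in the context
  if (!priorNames.includes("summary")) {
    emit({ name: "summary", payload: { seen: priorNames.length, runIds } });
  }
};
// ─── End example ───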
@@ -2832,7 +1353,8 @@ var run = (network, plane, options) => Effect.gen(function* () {
  reg.publishesTo,
  dequeue,
  plane,
- emitQueue
+ emitQueue,
+ channel.name
  );
  }
  }
@@ -2901,7 +1423,16 @@ function streamFromDequeue(take, signal, eventFilter) {
  };
  }
  function expose(network, options) {
- const { auth, select, plane: providedPlane, onRequest, startEventName = "request" } = options;
+ const {
+   auth,
+   select,
+   plane: providedPlane,
+   onRequest,
+   triggerEvents,
+   tracingLayer
+ } = options;
+ const triggerEventDef = triggerEvents?.[0];
+ const triggerEventName = triggerEventDef?.name ?? "request";
  const channels = resolveChannels(network, select);
  const eventFilter = select?.events;
  const mainChannel = network.getMainChannel();
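// ─── Example (editor's sketch, not part of the diff) ───
// expose() now accepts triggerEvents (only the first entry's name is
// consulted, falling back to "request") and an optional tracingLayer.
// `network`, `auth`, and RequestEvent are placeholders defined elsewhere.
const api = network.expose({
  protocol: "sse",
  auth,
  triggerEvents: [RequestEvent],    // RequestEvent.name replaces the old startEventName
  tracingLayer: consoleTracerLayer  // optional; defined near the end of this file
});
// ─── End example ───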
@@ -2912,7 +1443,7 @@ function expose(network, options) {
  const payload = await extractPayload(req);
  const signal = req.request?.signal;
  const program = Effect.gen(function* () {
- const plane = providedPlane ?? (yield* createEventPlane(network));
+ const plane = providedPlane ?? (yield* createEventPlane({ network, store: network.getStore() }));
  if (!providedPlane) {
  const emitQueue = yield* Queue.unbounded();
  yield* Effect.fork(
@@ -2928,25 +1459,46 @@ function expose(network, options) {
  yield* Effect.sleep("10 millis");
  }
  const targetChannel = mainChannel?.name ?? channels[0];
- const emitStartEvent = (p) => {
-   const pld = p ?? payload;
+ let runId = req.runId ?? crypto.randomUUID();
+ let contextId = req.contextId ?? crypto.randomUUID();
+ const setRunId = (id) => {
+   runId = id;
+ };
+ const setContextId = (id) => {
+   contextId = id;
+ };
+ const emitStartEvent = (opts) => {
+   const meta = {
+     runId: opts.runId,
+     contextId: opts.contextId
+   };
  const envelope = {
-   name: startEventName,
-   meta: { runId: crypto.randomUUID() },
-   payload: pld
+   name: opts.event.name,
+   meta,
+   payload: opts.event.payload
  };
- Effect.runPromise(plane.publish(targetChannel, envelope)).catch(() => {
- });
+ Effect.runPromise(plane.publish(targetChannel, envelope)).catch(
+   () => {
+   }
+ );
  };
  const dequeue = yield* plane.subscribe(channels[0]);
  if (onRequest) {
  yield* Effect.tryPromise(
-   () => Promise.resolve(onRequest({ emitStartEvent, req, payload }))
+   () => Promise.resolve(
+     onRequest({
+       setRunId,
+       setContextId,
+       emitStartEvent,
+       req,
+       payload
+     })
+   )
  );
  } else if (!providedPlane) {
  const envelope = {
-   name: startEventName,
-   meta: { runId: crypto.randomUUID() },
+   name: triggerEventName,
+   meta: { runId, contextId },
  payload
  };
  yield* plane.publish(targetChannel, envelope);
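// ─── Example (editor's sketch, not part of the diff) ───
// An onRequest hook using the new identity setters; without a hook, ids come
// from req.runId / req.contextId or are minted with crypto.randomUUID().
const apiWithHook = network.expose({
  protocol: "sse",
  auth,
  onRequest: ({ setRunId, setContextId, emitStartEvent, req, payload }) => {
    const contextId = req.contextId ?? crypto.randomUUID();
    const runId = crypto.randomUUID();
    setContextId(contextId);
    setRunId(runId);
    emitStartEvent({ runId, contextId, event: { name: "request", payload } });
  }
});
// ─── End example ───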
@@ -2959,7 +1511,8 @@ function expose(network, options) {
  }
  return stream;
  });
- return Effect.runPromise(program.pipe(Effect.scoped));
+ const runnable = tracingLayer ? program.pipe(Effect.provide(tracingLayer), Effect.scoped) : program.pipe(Effect.scoped);
+ return Effect.runPromise(runnable);
  };
  return {
  protocol: "sse",
@@ -2991,6 +1544,7 @@ var AgentNetwork = class _AgentNetwork {
  this.channels = /* @__PURE__ */ new Map();
  this.agentRegistrations = /* @__PURE__ */ new Map();
  this.spawnerRegistrations = [];
+ this._store = createInMemoryNetworkStore();
  }
  /* ─── Public Static Factory ─── */
  static setup(callback) {
@@ -3078,6 +1632,10 @@ var AgentNetwork = class _AgentNetwork {
  getSpawnerRegistrations() {
  return this.spawnerRegistrations;
  }
+ /** Store defined at network setup time. Shared across all event planes created for this network. */
+ getStore() {
+   return this._store;
+ }
  /**
   * Expose the network as a streamable API (e.g. SSE). Returns an ExposedAPI
   * that adapters (NextEndpoint, ExpressEndpoint) consume to produce streamed
@@ -3085,7 +1643,7 @@ var AgentNetwork = class _AgentNetwork {
   *
   * @example
   * const api = network.expose({ protocol: "sse", auth, select });
-  * export const GET = NextEndpoint.from(api).handler();
+  * export const GET = NextEndpoint.from(api, { requestToContextId, requestToRunId }).handler();
   */
  expose(options) {
  return expose(this, options);
@@ -3103,7 +1661,11 @@ var AgentNetwork = class _AgentNetwork {
  }
  runScoped(network, capacity) {
  return Effect.gen(function* () {
- const plane = yield* createEventPlane(network, capacity);
+ const plane = yield* createEventPlane({
+   network,
+   capacity,
+   store: network.getStore()
+ });
  yield* Effect.fork(run(network, plane));
  return plane;
  });
@@ -3111,7 +1673,7 @@ var AgentNetwork = class _AgentNetwork {
  };
  var EventMetaSchema = Schema.Struct({
  runId: Schema.String,
- contextId: Schema.optional(Schema.String),
+ contextId: Schema.String,
  correlationId: Schema.optional(Schema.String),
  causationId: Schema.optional(Schema.String),
  ts: Schema.optional(Schema.Number)
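// ─── Note (editor's sketch, not part of the diff) ───
// contextId is now required on event meta:
Schema.decodeUnknownSync(EventMetaSchema)({ runId: "r1", contextId: "c1" }); // ok
Schema.decodeUnknownSync(EventMetaSchema)({ runId: "r1" });                  // now throws
// ─── End note ───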
@@ -3134,9 +1696,7 @@ var AgentNetworkEvent = {
  const makeBound = (meta, payload2) => Effect.runSync(
    decodeEnvelope({ name, meta, payload: payload2 })
  );
- const makeEffect = (payload2) => decodePayload(payload2).pipe(
-   Effect.map((p) => ({ name, payload: p }))
- );
+ const makeEffect = (payload2) => decodePayload(payload2).pipe(Effect.map((p) => ({ name, payload: p })));
  const makeBoundEffect = (meta, payload2) => decodeEnvelope({ name, meta, payload: payload2 });
  const is = Schema.is(envelopeSchema);
  return {
@@ -3169,13 +1729,19 @@ var Agent = class {
  return __privateGet(this, _listensTo);
  }
  async invoke(options) {
- const { triggerEvent, emit } = options ?? {};
+ const { triggerEvent, emit, runEvents, contextEvents } = options ?? {};
  const emitFn = emit ?? ((_event) => {
  });
  await __privateGet(this, _logic).call(this, {
  params: __privateGet(this, _params),
  triggerEvent: triggerEvent ?? void 0,
- emit: emitFn
+ emit: emitFn,
+ runEvents: runEvents ?? [],
+ contextEvents: contextEvents ?? {
+   all: [],
+   byRun: () => [],
+   map: /* @__PURE__ */ new Map()
+ }
  });
  }
  getId() {
@@ -3288,14 +1854,19 @@ function toSSEStream(source, signal) {

  // src/matrix/io/adapters/next-endpoint.ts
  var NextEndpoint = {
- from(api) {
+ from(api, options) {
  if (api.protocol !== "sse") {
  throw new Error(`NextEndpoint: unsupported protocol "${api.protocol}"`);
  }
+ const { requestToContextId, requestToRunId } = options;
  return {
  handler() {
  return async (request) => {
- const req = { request };
+ const req = {
+   request,
+   contextId: requestToContextId(request),
+   runId: requestToRunId(request)
+ };
  try {
  const encoder = new TextEncoder();
  const { readable, writable } = new TransformStream();
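// ─── Example (editor's sketch, not part of the diff) ───
// A Next.js route wiring the new id extractors (from() now destructures its
// options unconditionally, so both are expected). The header names are
// placeholders; returning undefined lets expose() mint fresh ids.
export const GET = NextEndpoint.from(api, {
  requestToContextId: (request) => request.headers.get("x-context-id") ?? undefined,
  requestToRunId: (request) => request.headers.get("x-run-id") ?? undefined
}).handler();
// ─── End example ───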
@@ -3340,12 +1911,13 @@ var NextEndpoint = {

  // src/matrix/io/adapters/express-endpoint.ts
  var ExpressEndpoint = {
- from(api) {
+ from(api, options) {
  if (api.protocol !== "sse") {
  throw new Error(
    `ExpressEndpoint: unsupported protocol "${api.protocol}"`
  );
  }
+ const { requestToContextId, requestToRunId } = options;
  return {
  handler() {
  return async (req, res) => {
@@ -3354,7 +1926,9 @@ var ExpressEndpoint = {
  const exposeReq = {
  request: { signal: controller.signal },
  req,
- res
+ res,
+ contextId: requestToContextId(req),
+ runId: requestToRunId(req)
  };
  try {
  const encoder = new TextEncoder();
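// ─── Example (editor's sketch, not part of the diff) ───
// The Express flavor of the same wiring; the route and query-parameter names
// are placeholders.
app.get("/api/agents", ExpressEndpoint.from(api, {
  requestToContextId: (req) => req.query.contextId,
  requestToRunId: (req) => req.query.runId
}).handler());
// ─── End example ───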
@@ -3386,7 +1960,79 @@ var ExpressEndpoint = {
  };
  }
  };
+ var randomHexString = (length) => {
+   const chars = "abcdef0123456789";
+   let result = "";
+   for (let i = 0; i < length; i++) {
+     result += chars.charAt(Math.floor(Math.random() * chars.length));
+   }
+   return result;
+ };
+ var ConsoleSpan = class {
+   constructor(name, parent, context, links, startTime, kind, depth) {
+     this.name = name;
+     this.parent = parent;
+     this.context = context;
+     this.startTime = startTime;
+     this.kind = kind;
+     this.depth = depth;
+     this._tag = "Span";
+     this.sampled = true;
+     this.attributes = /* @__PURE__ */ new Map();
+     this.links = [];
+     this.traceId = parent._tag === "Some" ? parent.value.traceId : randomHexString(32);
+     this.spanId = randomHexString(16);
+     this.links = Array.from(links);
+     this.status = { _tag: "Started", startTime };
+   }
+   end(endTime, exit) {
+     if (this.status._tag === "Ended")
+       return;
+     const startTime = this.status.startTime;
+     const durationNs = endTime - startTime;
+     const durationMs = Number(durationNs) / 1e6;
+     const indent = " ".repeat(this.depth);
+     const attrs = Object.fromEntries(this.attributes);
+     const status = Exit.isSuccess(exit) ? "ok" : "error";
+     console.log(
+       `${indent}[trace] ${this.name} ${durationMs.toFixed(2)}ms (${status})`,
+       Object.keys(attrs).length > 0 ? attrs : ""
+     );
+     this.status = { _tag: "Ended", startTime, endTime, exit };
+   }
+   attribute(key, value) {
+     this.attributes.set(key, value);
+   }
+   event(_name, _startTime, _attributes) {
+   }
+   addLinks(links) {
+     this.links.push(...links);
+   }
+ };
+ function getDepth(parent) {
+   if (parent._tag === "None")
+     return 0;
+   const p = parent.value;
+   if (p._tag === "ExternalSpan")
+     return 0;
+   return 1 + getDepth(p.parent);
+ }
+ var consoleTracer = Tracer.make({
+   span: (name, parent, context, links, startTime, kind) => new ConsoleSpan(
+     name,
+     parent,
+     context,
+     links,
+     startTime,
+     kind,
+     getDepth(parent)
+   ),
+   context: (f) => f()
+ });
+ var consoleTracerLayer = Layer.setTracer(
+   consoleTracer
+ );
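// ─── Example (editor's sketch, not part of the diff) ───
// Providing consoleTracerLayer (e.g. via expose({ tracingLayer })) prints each
// span as it ends, indented one space per nesting level, so child spans
// appear before their parents:
//
//    [trace] agent.invoke 11.87ms (ok) { agentId: 'planner', ... }
//   [trace] agent.listen 12.15ms (ok) { agentId: 'planner', channel: 'main', ... }
//   [trace] event.publish 0.42ms (ok) { channel: 'main', ... }
//
// The lines above are invented output; real timings and attributes will vary.
const tracedApi = network.expose({ protocol: "sse", auth, tracingLayer: consoleTracerLayer });
// ─── End example ───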

- export { Agent, AgentFactory, AgentNetwork, AgentNetworkEvent, AiCursor, BaseVoiceEndpointAdapter, Channel, ChannelName, ConfiguredChannel, Emitter, EventMetaSchema, ExposeAuthError, ExpressEndpoint, InputAudioController, NextEndpoint, Pump, Sink, SocketIoFactory, TransformMessages, VoiceEndpointAdapter, VoiceSocketAdapter, ensureFullWords, formatSSE, httpStreamResponse, isHttpStreamSink, toSSEStream, useConversation, useSocketConversation };
+ export { Agent, AgentFactory, AgentNetwork, AgentNetworkEvent, Channel, ChannelName, ConfiguredChannel, EventMetaSchema, ExposeAuthError, ExpressEndpoint, NextEndpoint, Pump, Sink, SocketIoFactory, TransformMessages, consoleTracer, consoleTracerLayer, ensureFullWords, formatSSE, httpStreamResponse, isHttpStreamSink, toSSEStream };
  //# sourceMappingURL=out.js.map
  //# sourceMappingURL=index.js.map