booths 1.1.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -42,7 +42,7 @@ graph TD
42
42
  1. **Application Layer**: Your application integrates the Booths framework to handle conversational AI interactions.
43
43
  2. **`CoreBooth`**: The framework foundation that provides global functionality, instructions, and infrastructure that applies to all booths. It manages the overall system configuration and coordinates the interaction flow.
44
44
  3. **`InteractionProcessor`**: The engine that drives the conversation. It takes user input, runs it through the plugin lifecycle, sends it to the LLM (via the adapter), and processes the response.
45
- 4. **`LLMAdapter`**: A component that handles communication with the specific LLM provider (e.g., OpenAI). It translates requests and responses between the Booths system and the LLM's API.
45
+ 4. **`LLMAdapter`**: A component that handles communication with the specific LLM provider (e.g., OpenAI). It translates requests and responses between the Booths system and the LLM's API. Supports both traditional and streaming response modes.
46
46
  5. **Registries**: These are responsible for managing the different components of the system:
47
47
  * `BoothRegistry`: Manages `BoothConfig` objects that define the behavior of different AI agents.
48
48
  * `ToolRegistry`: Manages the tools (functions) that booths can use.
@@ -124,7 +124,13 @@ The `CoreBooth` requires an `LLMAdapter` to communicate with your chosen languag
124
124
 
125
125
  ```typescript
126
126
  // in OpenAIAdapter.ts
127
- import type { LLMAdapter, ResponseCreateParamsNonStreaming, Response } from 'booths';
127
+ import type {
128
+ LLMAdapter,
129
+ ResponseCreateParamsNonStreaming,
130
+ ResponseCreateParamsStreaming,
131
+ Response,
132
+ StreamEvent
133
+ } from 'booths';
128
134
  import OpenAI from 'openai';
129
135
 
130
136
  export class OpenAIAdapter implements LLMAdapter<Response> {
@@ -141,6 +147,24 @@ export class OpenAIAdapter implements LLMAdapter<Response> {
141
147
  async interpret(response: Response): Promise<Response> {
142
148
  return response;
143
149
  }
150
+
151
+ // Optional: Add streaming support
152
+ async *invokeStream(params: ResponseCreateParamsStreaming): AsyncIterable<Response> {
153
+ const stream = await this.openai.responses.create({ ...params, model: 'gpt-4o', stream: true });
154
+ for await (const chunk of stream) {
155
+ yield chunk;
156
+ }
157
+ }
158
+
159
+ async interpretStream(chunk: Response): Promise<StreamEvent> {
160
+ // Convert OpenAI stream chunks to StreamEvents
161
+ // Implementation depends on your streaming format
162
+ return {
163
+ type: 'text_delta',
164
+ delta: (chunk as any).choices?.[0]?.delta?.content || '',
165
+ content: (chunk as any).choices?.[0]?.delta?.content || ''
166
+ };
167
+ }
144
168
  }
145
169
  ```
146
170
 
@@ -195,6 +219,7 @@ Plugins are classes that implement the `BoothPlugin` interface. They can execute
195
219
  - `onBeforeToolCall`: Before each individual tool call is executed _(allows modification of tool parameters, validation, and logging)_.
196
220
  - `onAfterToolCall`: After each individual tool call is successfully executed _(allows result processing, caching, and transformation)_.
197
221
  - `onToolCallError`: When a tool call encounters an error _(allows custom error handling and recovery)_.
222
+ - `onStreamEvent`: _(Optional)_ For each stream event during streaming response generation _(enables real-time processing and UI updates)_.
198
223
  - `shouldEndInteractionLoop`: To determine if the conversation turn is over.
199
224
  - `onAfterInteractionLoopEnd`: After the main loop has finished.
200
225
 
@@ -227,3 +252,245 @@ The `InteractionProcessor` is the engine of the system. It manages the interacti
227
252
  5. Runs the `onResponseReceived` plugin hooks to process the response (e.g., execute tools).
228
253
  6. Repeats this loop until a plugin's `shouldEndInteractionLoop` returns `true`.
229
254
  7. Runs the `onAfter...` plugin hooks for cleanup.
255
+
256
+ ## Streaming Support
257
+
258
+ The Booths framework includes comprehensive streaming support that enables real-time response generation while preserving the full plugin ecosystem and backward compatibility.
259
+
260
+ ### Overview
261
+
262
+ Streaming allows the LLM's response to be processed and displayed in real-time as it's being generated, providing a more responsive user experience. The framework handles streaming at multiple levels:
263
+
264
+ - **Real-time Events**: Stream events are emitted as content arrives
265
+ - **Plugin Integration**: Plugins can hook into streaming events for real-time processing
266
+ - **Complete Responses**: Existing plugins continue to receive complete responses
267
+ - **Automatic Fallback**: Graceful fallback to non-streaming if streaming fails
268
+
269
+ ### Enabling Streaming
270
+
271
+ Streaming can be enabled simply by setting a boolean flag when creating the `InteractionProcessor`:
272
+
273
+ ```typescript
274
+ import { InteractionProcessor, type InteractionProcessorOptions } from 'booths';
275
+
276
+ const options: InteractionProcessorOptions = {
277
+ streaming: true, // Enable streaming
278
+ fallbackToNonStreaming: true // Optional: fallback if streaming fails
279
+ };
280
+
281
+ const processor = new InteractionProcessor(
282
+ boothRegistry,
283
+ pluginRegistry,
284
+ toolRegistry,
285
+ llmAdapter, // Must implement streaming methods
286
+ options
287
+ );
288
+ ```
289
+
290
+ ### Stream Events
291
+
292
+ The streaming system emits different types of events as the response is generated:
293
+
294
+ ```typescript
295
+ export interface StreamEvent {
296
+ type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
297
+ content?: string; // Full content for text events
298
+ delta?: string; // Incremental text for text_delta events
299
+ toolCall?: object; // Tool call information
300
+ metadata?: any; // Additional event metadata
301
+ }
302
+ ```
303
+
304
+ **Event Types:**
305
+ - `response_start`: Streaming begins
306
+ - `text_delta`: Incremental text content arrives
307
+ - `tool_call_start`: LLM begins a tool call
308
+ - `tool_call_end`: Tool call completes
309
+ - `response_end`: Streaming completes
310
+
311
+ ### Streaming Plugin Hooks
312
+
313
+ Plugins can implement the optional `onStreamEvent` hook to process stream events in real-time:
314
+
315
+ ```typescript
316
+ import type { BoothPlugin, StreamEvent, StreamContext, RepositoryUtilities } from 'booths';
317
+
318
+ export class MyStreamingPlugin implements BoothPlugin {
319
+ id = 'my-streaming-plugin';
320
+ name = 'My Streaming Plugin';
321
+ description = 'Handles streaming events';
322
+
323
+ async onStreamEvent(
324
+ utilities: RepositoryUtilities,
325
+ streamEvent: StreamEvent,
326
+ context: StreamContext
327
+ ): Promise<StreamEvent> {
328
+ // Process the stream event
329
+ if (streamEvent.type === 'text_delta') {
330
+ console.log(`Received text: ${streamEvent.delta}`);
331
+
332
+ // Optionally transform the event
333
+ return {
334
+ ...streamEvent,
335
+ delta: streamEvent.delta?.toUpperCase() // Example transformation
336
+ };
337
+ }
338
+
339
+ return streamEvent; // Pass through unchanged
340
+ }
341
+
342
+ async shouldEndInteractionLoop(): Promise<boolean> {
343
+ return false;
344
+ }
345
+ }
346
+ ```
347
+
348
+ ### Built-in Streaming Plugins
349
+
350
+ The framework includes example streaming plugins:
351
+
352
+ #### StreamingLoggerPlugin
353
+
354
+ Logs streaming events in real-time for debugging and monitoring:
355
+
356
+ ```typescript
357
+ import { StreamingLoggerPlugin } from 'booths';
358
+
359
+ const logger = new StreamingLoggerPlugin('[MyApp]');
360
+ pluginRegistry.registerPlugins([logger]);
361
+ ```
362
+
363
+ #### StreamingUIPlugin
364
+
365
+ Provides real-time UI updates with customizable callbacks:
366
+
367
+ ```typescript
368
+ import { StreamingUIPlugin } from 'booths';
369
+
370
+ const uiPlugin = new StreamingUIPlugin((event, context) => {
371
+ if (event.type === 'text_delta') {
372
+ // Update your UI with the new text
373
+ document.getElementById('response').textContent += event.delta;
374
+ }
375
+ });
376
+
377
+ pluginRegistry.registerPlugins([uiPlugin]);
378
+ ```
379
+
380
+ ### LLM Adapter Streaming Implementation
381
+
382
+ To support streaming, your LLM adapter should implement the optional streaming methods:
383
+
384
+ ```typescript
385
+ export class MyStreamingAdapter implements LLMAdapter<MyResponse> {
386
+ // Required methods
387
+ async invoke(params: ResponseCreateParamsNonStreaming): Promise<MyResponse> {
388
+ // Non-streaming implementation
389
+ }
390
+
391
+ async interpret(response: MyResponse): Promise<Response> {
392
+ // Convert to standard format
393
+ }
394
+
395
+ // Optional streaming methods
396
+ async *invokeStream(params: ResponseCreateParamsStreaming): AsyncIterable<MyResponse> {
397
+ // Yield streaming chunks
398
+ const stream = await this.llm.createStreamingResponse(params);
399
+ for await (const chunk of stream) {
400
+ yield chunk;
401
+ }
402
+ }
403
+
404
+ async interpretStream(chunk: MyResponse): Promise<StreamEvent> {
405
+ // Convert chunk to StreamEvent
406
+ return {
407
+ type: 'text_delta',
408
+ delta: chunk.delta,
409
+ content: chunk.content
410
+ };
411
+ }
412
+ }
413
+ ```
414
+
415
+ ### Stream Context
416
+
417
+ Plugins receive context information about the streaming session:
418
+
419
+ ```typescript
420
+ export interface StreamContext {
421
+ responseParams: ResponseCreateParamsNonStreaming; // Original request
422
+ streamIndex: number; // Event index in stream
423
+ totalExpectedEvents?: number; // Expected total (if known)
424
+ accumulatedResponse: Partial<Response>; // Response built so far
425
+ }
426
+ ```
427
+
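+ For example, a plugin's `onStreamEvent` hook can use this context to report progress. A minimal sketch (reusing the hook shape from the plugin example above; the logging is purely illustrative):
+
+ ```typescript
+ async onStreamEvent(
+   utilities: RepositoryUtilities,
+   event: StreamEvent,
+   context: StreamContext
+ ): Promise<StreamEvent> {
+   if (event.type === 'text_delta') {
+     // Text accumulated by the processor before this event arrived
+     const soFar = context.accumulatedResponse.output_text ?? '';
+     console.log(`Event #${context.streamIndex}: ${soFar.length} characters so far`);
+   }
+   return event;
+ }
+ ```
+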
428
+ ### Error Handling
429
+
430
+ The streaming system includes robust error handling:
431
+
432
+ - **Plugin Error Isolation**: Errors in streaming plugins don't break the stream
433
+ - **Automatic Fallback**: Can fall back to non-streaming mode on errors
434
+ - **Graceful Degradation**: System continues operating if streaming fails
435
+
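+ For example, to opt out of the automatic fallback described above, disable it in the options. A rough sketch (reusing the registries and streaming adapter from the earlier setup; the exact shape of the synthetic error response may vary):
+
+ ```typescript
+ const strictProcessor = new InteractionProcessor(
+   boothRegistry,
+   pluginRegistry,
+   toolRegistry,
+   streamingAdapter,
+   { streaming: true, fallbackToNonStreaming: false }
+ );
+
+ // With fallback disabled, a streaming failure is not retried in
+ // non-streaming mode; the processor resolves with an error response instead.
+ const result = await strictProcessor.send('Hello');
+ if (result.error) {
+   console.error('Streaming failed:', result.error.message);
+ }
+ ```
+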
436
+ ### Backward Compatibility
437
+
438
+ Streaming support is fully backward compatible:
439
+
440
+ - **Existing Plugins**: Continue to work unchanged
441
+ - **Complete Responses**: Plugins still receive full `Response` objects
442
+ - **Optional Implementation**: Adapters don't require streaming support
443
+ - **Default Behavior**: Non-streaming mode by default
444
+
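+ In practice this means an adapter written against 1.1.0 keeps working as-is. A minimal sketch (assuming the non-streaming `OpenAIAdapter` from earlier and the same registries):
+
+ ```typescript
+ const basicAdapter = new OpenAIAdapter(/* ...same setup as above... */); // implements only invoke/interpret
+
+ // No options passed: streaming stays disabled by default
+ const processor = new InteractionProcessor(
+   boothRegistry,
+   pluginRegistry,
+   toolRegistry,
+   basicAdapter
+ );
+
+ const response = await processor.send('Hello'); // behaves exactly as before
+ ```
+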
445
+ ### Example: Complete Streaming Setup
446
+
447
+ Here's a complete example showing streaming integration:
448
+
449
+ ```typescript
450
+ import {
451
+ InteractionProcessor,
452
+ BoothRegistry,
453
+ BoothPluginRegistry,
454
+ ToolRegistry,
455
+ StreamingLoggerPlugin,
456
+ StreamingUIPlugin,
457
+ type InteractionProcessorOptions
458
+ } from 'booths';
459
+
460
+ // 1. Create streaming-enabled adapter (implement streaming methods)
461
+ const streamingAdapter = new MyStreamingAdapter(apiKey);
462
+
463
+ // 2. Set up registries and booth
464
+ const testBooth = { id: 'chat-booth', role: 'Assistant', description: 'Helpful assistant' };
465
+ const boothRegistry = new BoothRegistry(testBooth);
466
+ const pluginRegistry = new BoothPluginRegistry();
467
+ const toolRegistry = new ToolRegistry();
468
+
469
+ // 3. Set up streaming plugins
470
+ const logger = new StreamingLoggerPlugin('[Chat]');
471
+ const uiUpdater = new StreamingUIPlugin((event) => {
472
+ if (event.type === 'text_delta') {
473
+ document.getElementById('chat').textContent += event.delta;
474
+ }
475
+ });
476
+
477
+ pluginRegistry.registerPlugins([logger, uiUpdater]);
478
+
479
+ // 4. Enable streaming
480
+ const streamingOptions: InteractionProcessorOptions = {
481
+ streaming: true,
482
+ fallbackToNonStreaming: true
483
+ };
484
+
485
+ const processor = new InteractionProcessor(
486
+ boothRegistry,
487
+ pluginRegistry,
488
+ toolRegistry,
489
+ streamingAdapter,
490
+ streamingOptions
491
+ );
492
+
493
+ // 5. Send message with real-time streaming
494
+ const response = await processor.send('Hello, stream this response!');
495
+ // User sees content appear in real-time, plugins receive complete response
496
+ ```
package/dist/index.d.ts CHANGED
@@ -150,6 +150,15 @@ export declare interface BoothPlugin {
150
150
  * @returns The potentially modified final response.
151
151
  */
152
152
  onAfterInteractionLoopEnd?: (interactionLoopEndArgs: RepositoryUtilities, response: Response_2) => Promise<Response_2>;
153
+ /**
154
+ * Called for each streaming event as it arrives during response generation.
155
+ * This is optional and only called when streaming is enabled.
156
+ * @param utilities - Utilities for accessing repositories.
157
+ * @param streamEvent - The streaming event that was received.
158
+ * @param context - Context information about the streaming session.
159
+ * @returns The potentially modified stream event, or void to pass through unchanged.
160
+ */
161
+ onStreamEvent?: (utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext) => Promise<StreamEvent | void>;
153
162
  }
154
163
 
155
164
  /**
@@ -191,6 +200,15 @@ export declare class BoothPluginRegistry {
191
200
  * @returns The plugin instance if found, undefined otherwise
192
201
  */
193
202
  getPluginById(pluginId: string): BoothPlugin | undefined;
203
+ /**
204
+ * Finds and returns multiple plugins by their IDs.
205
+ * Throws an error if any plugin ID is not found.
206
+ *
207
+ * @param pluginIds - Array of unique identifiers of the plugins to retrieve
208
+ * @returns Array of plugin instances
209
+ * @throws Error if any plugin ID is not registered
210
+ */
211
+ getPluginsByIds(pluginIds: string[]): BoothPlugin[];
194
212
  /**
195
213
  * Removes a plugin from the registry by its ID.
196
214
  * Throws an error if the plugin doesn't exist.
@@ -284,6 +302,17 @@ export declare class BoothPluginRegistry {
284
302
  * @returns Error result or recovery value after all plugins have processed it
285
303
  */
286
304
  runToolCallError(utilities: RepositoryUtilities, toolCall: ResponseFunctionToolCall, error: Error, context: ToolCallContext): Promise<any>;
305
+ /**
306
+ * Sequentially invokes every plugin's onStreamEvent hook.
307
+ * This is called for each streaming event during response generation,
308
+ * allowing plugins to process or modify stream events in real-time.
309
+ *
310
+ * @param utilities - Context information including booth and tool registries
311
+ * @param streamEvent - The streaming event that was received
312
+ * @param context - Context information about the streaming session
313
+ * @returns Modified stream event after all plugins have processed it
314
+ */
315
+ runStreamEvent(utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
287
316
  }
288
317
 
289
318
  /**
@@ -384,6 +413,15 @@ export declare class BoothRegistry {
384
413
  * @returns The booth configuration if found, undefined otherwise
385
414
  */
386
415
  getBoothById(boothId: string): BoothConfig | undefined;
416
+ /**
417
+ * Finds and returns multiple booth configurations by their IDs.
418
+ * Throws an error if any booth ID is not found.
419
+ *
420
+ * @param boothIds - Array of unique identifiers of the booths to retrieve
421
+ * @returns Array of booth configurations
422
+ * @throws Error if any booth ID is not registered
423
+ */
424
+ getBoothsByIds(boothIds: string[]): BoothConfig[];
387
425
  /**
388
426
  * Returns all registered booth configurations.
389
427
  *
@@ -747,7 +785,22 @@ export declare class InteractionProcessor<T> {
747
785
  private boothPlugins;
748
786
  private toolRegistry;
749
787
  private llmAdapter;
788
+ /**
789
+ * Generates a consistent ID for responses and messages.
790
+ * @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
791
+ * @returns A unique ID string
792
+ * @private
793
+ */
794
+ private generateId;
795
+ /**
796
+ * Creates a standardized message object for responses.
797
+ * @param text - The text content for the message
798
+ * @returns A formatted message object
799
+ * @private
800
+ */
801
+ private createMessage;
750
802
  private loopLimit;
803
+ private options;
751
804
  /**
752
805
  * Creates a synthetic error response with proper structure and error details.
753
806
  * @param error - The error that occurred
@@ -763,6 +816,36 @@ export declare class InteractionProcessor<T> {
763
816
  * @private
764
817
  */
765
818
  private callLLM;
819
+ /**
820
+ * Calls the LLM in non-streaming mode.
821
+ * @param responseCreateParams - The parameters for creating the response.
822
+ * @returns A promise that resolves with the LLM's response.
823
+ * @private
824
+ */
825
+ private callLLMNonStreaming;
826
+ /**
827
+ * Calls the LLM in streaming mode, accumulating stream events into a complete response.
828
+ * @param responseCreateParams - The parameters for creating the response.
829
+ * @returns A promise that resolves with the accumulated response.
830
+ * @private
831
+ */
832
+ private callLLMStreaming;
833
+ /**
834
+ * Merges a stream event into the accumulated response.
835
+ * @param accumulated - The current accumulated response.
836
+ * @param streamEvent - The stream event to merge.
837
+ * @returns The updated accumulated response.
838
+ * @private
839
+ */
840
+ private mergeStreamEvent;
841
+ /**
842
+ * Creates a complete Response object from accumulated stream data.
843
+ * @param accumulated - The accumulated response data.
844
+ * @param originalParams - The original request parameters.
845
+ * @returns A complete Response object.
846
+ * @private
847
+ */
848
+ private finalizeAccumulatedResponse;
766
849
  /**
767
850
  * Runs the main interaction loop, sending messages to the LLM and processing
768
851
  * the responses through the registered plugins.
@@ -777,8 +860,9 @@ export declare class InteractionProcessor<T> {
777
860
  * @param boothPlugins - The registry for booth plugins.
778
861
  * @param toolRegistry - The registry for available tools.
779
862
  * @param llmAdapter - The adapter for interacting with the LLM.
863
+ * @param options - Configuration options for streaming and other behaviors.
780
864
  */
781
- constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>);
865
+ constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>, options?: InteractionProcessorOptions);
782
866
  /**
783
867
  * Sends a message to the LLM and processes the response through the interaction loop.
784
868
  * This involves running pre-loop, pre-send, response-received, and post-loop plugin hooks.
@@ -788,9 +872,23 @@ export declare class InteractionProcessor<T> {
788
872
  send(input: string | ResponseInput): Promise<Response_2>;
789
873
  }
790
874
 
875
+ /**
876
+ * Configuration options for the InteractionProcessor.
877
+ */
878
+ export declare interface InteractionProcessorOptions {
879
+ /** Enable streaming mode for LLM responses */
880
+ streaming?: boolean;
881
+ /** Fallback to non-streaming if streaming fails */
882
+ fallbackToNonStreaming?: boolean;
883
+ }
884
+
791
885
  export declare interface LLMAdapter<LLMResponse = any> {
792
886
  invoke: (responseParams: ResponseCreateParamsNonStreaming) => Promise<LLMResponse>;
793
887
  interpret: (response: LLMResponse) => Promise<Response_2>;
888
+ /** Optional method for streaming LLM responses */
889
+ invokeStream?: (responseParams: ResponseCreateParamsStreaming) => AsyncIterable<LLMResponse>;
890
+ /** Optional method for interpreting individual stream chunks into StreamEvents */
891
+ interpretStream?: (streamChunk: LLMResponse) => Promise<StreamEvent>;
794
892
  }
795
893
 
796
894
  /**
@@ -814,8 +912,24 @@ export declare type RepositoryUtilities = {
814
912
  llmAdapter: LLMAdapter<unknown>;
815
913
  };
816
914
 
915
+ export { Response_2 as Response }
916
+
817
917
  export { ResponseCreateParamsNonStreaming }
818
918
 
919
+ /**
920
+ * Response parameters for streaming requests.
921
+ * This creates a new type that has all the properties of ResponseCreateParamsNonStreaming
922
+ * but with stream: true instead of stream: false.
923
+ */
924
+ export declare type ResponseCreateParamsStreaming = Omit<ResponseCreateParamsNonStreaming, 'stream'> & {
925
+ /** Must be true for streaming requests */
926
+ stream: true;
927
+ };
928
+
929
+ export { ResponseInput }
930
+
931
+ export { ResponseInputItem }
932
+
819
933
  /**
820
934
  * Represents the result of processing a single tool call.
821
935
  */
@@ -836,6 +950,93 @@ export declare type SingleToolProcessingResult = {
836
950
  toolExecuted: boolean;
837
951
  };
838
952
 
953
+ /**
954
+ * Context information provided during streaming event processing.
955
+ */
956
+ export declare interface StreamContext {
957
+ /** The current response parameters being processed */
958
+ responseParams: ResponseCreateParamsNonStreaming;
959
+ /** Index of this stream event in the sequence */
960
+ streamIndex: number;
961
+ /** Total expected number of events (if known) */
962
+ totalExpectedEvents?: number;
963
+ /** Accumulated response content so far */
964
+ accumulatedResponse: Partial<Response_2>;
965
+ }
966
+
967
+ /**
968
+ * Represents a streaming event emitted during LLM response generation.
969
+ */
970
+ export declare interface StreamEvent {
971
+ /** Type of stream event */
972
+ type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
973
+ /** Text content for text_delta events */
974
+ content?: string;
975
+ /** Incremental text delta for text_delta events */
976
+ delta?: string;
977
+ /** Tool call information for tool-related events */
978
+ toolCall?: ResponseFunctionToolCall;
979
+ /** Additional metadata for the event */
980
+ metadata?: Record<string, unknown>;
981
+ }
982
+
983
+ /**
984
+ * Callback function type for handling stream events in the UI.
985
+ */
986
+ export declare type StreamEventCallback = (event: StreamEvent, context: StreamContext) => void;
987
+
988
+ /**
989
+ * Example streaming plugin that logs stream events in real-time.
990
+ * This demonstrates how to implement streaming hooks in plugins.
991
+ */
992
+ export declare class StreamingLoggerPlugin implements BoothPlugin {
993
+ readonly id = "streaming-logger";
994
+ readonly name = "Streaming Logger Plugin";
995
+ readonly description = "Logs streaming events in real-time for debugging and monitoring";
996
+ private logPrefix;
997
+ constructor(logPrefix?: string);
998
+ /**
999
+ * Handle individual stream events as they arrive.
1000
+ * This allows for real-time processing and logging of streaming content.
1001
+ */
1002
+ onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
1003
+ /**
1004
+ * Required method - determines whether to end the interaction loop.
1005
+ * For a logging plugin, we never want to end the loop ourselves.
1006
+ */
1007
+ shouldEndInteractionLoop(): Promise<boolean>;
1008
+ }
1009
+
1010
+ /**
1011
+ * Example streaming plugin that provides real-time UI updates.
1012
+ * This plugin demonstrates how to emit stream events to the UI layer.
1013
+ */
1014
+ export declare class StreamingUIPlugin implements BoothPlugin {
1015
+ readonly id = "streaming-ui";
1016
+ readonly name = "Streaming UI Plugin";
1017
+ readonly description = "Provides real-time UI updates during streaming responses";
1018
+ private onStreamCallback?;
1019
+ constructor(onStreamCallback?: StreamEventCallback);
1020
+ /**
1021
+ * Handle individual stream events and emit them to the UI layer.
1022
+ * This enables real-time updates to the user interface.
1023
+ */
1024
+ onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
1025
+ /**
1026
+ * Set or update the stream callback for UI updates.
1027
+ */
1028
+ setStreamCallback(callback: StreamEventCallback): void;
1029
+ /**
1030
+ * Remove the stream callback.
1031
+ */
1032
+ removeStreamCallback(): void;
1033
+ /**
1034
+ * Required method - determines whether to end the interaction loop.
1035
+ * For a UI plugin, we never want to end the loop ourselves.
1036
+ */
1037
+ shouldEndInteractionLoop(): Promise<boolean>;
1038
+ }
1039
+
839
1040
  /**
840
1041
  * Context information provided during tool call execution.
841
1042
  */
@@ -986,6 +1187,15 @@ export declare class ToolRegistry {
986
1187
  * @returns The tool instance if found, undefined otherwise
987
1188
  */
988
1189
  getTool(toolName: string): ToolModule;
1190
+ /**
1191
+ * Finds and returns multiple tools by their names.
1192
+ * Throws an error if any tool name is not found (via getTool method).
1193
+ *
1194
+ * @param toolNames - Array of unique names of the tools to retrieve
1195
+ * @returns Array of tool instances
1196
+ * @throws Error if any tool name is not registered
1197
+ */
1198
+ getToolsByNames(toolNames: string[]): ToolModule[];
989
1199
  getGlobalTools(): ToolModule[];
990
1200
  /**
991
1201
  * Returns all registered tools as an array.
package/dist/index.js CHANGED
@@ -1,3 +1,4 @@
1
+ import { randomUUID as R } from "crypto";
1
2
  class p {
2
3
  /**
3
4
  * Collection of registered plugins.
@@ -41,6 +42,24 @@ class p {
41
42
  getPluginById(t) {
42
43
  return this.plugins.find((o) => o.id === t);
43
44
  }
45
+ /**
46
+ * Finds and returns multiple plugins by their IDs.
47
+ * Throws an error if any plugin ID is not found.
48
+ *
49
+ * @param pluginIds - Array of unique identifiers of the plugins to retrieve
50
+ * @returns Array of plugin instances
51
+ * @throws Error if any plugin ID is not registered
52
+ */
53
+ getPluginsByIds(t) {
54
+ const o = [];
55
+ for (const e of t) {
56
+ const r = this.getPluginById(e);
57
+ if (!r)
58
+ throw new Error(`Plugin with ID ${e} is not registered.`);
59
+ o.push(r);
60
+ }
61
+ return o;
62
+ }
44
63
  /**
45
64
  * Removes a plugin from the registry by its ID.
46
65
  * Throws an error if the plugin doesn't exist.
@@ -162,8 +181,8 @@ class p {
162
181
  */
163
182
  async runAfterToolCall(t, o, e, r) {
164
183
  let s = e;
165
- for (const l of this.plugins)
166
- l.onAfterToolCall && (s = await l.onAfterToolCall(t, o, s, r));
184
+ for (const i of this.plugins)
185
+ i.onAfterToolCall && (s = await i.onAfterToolCall(t, o, s, r));
167
186
  return s;
168
187
  }
169
188
  /**
@@ -179,10 +198,32 @@ class p {
179
198
  */
180
199
  async runToolCallError(t, o, e, r) {
181
200
  let s = `Error: ${e.message}`;
182
- for (const l of this.plugins)
183
- l.onToolCallError && (s = await l.onToolCallError(t, o, e, r));
201
+ for (const i of this.plugins)
202
+ i.onToolCallError && (s = await i.onToolCallError(t, o, e, r));
184
203
  return s;
185
204
  }
205
+ /**
206
+ * Sequentially invokes every plugin's onStreamEvent hook.
207
+ * This is called for each streaming event during response generation,
208
+ * allowing plugins to process or modify stream events in real-time.
209
+ *
210
+ * @param utilities - Context information including booth and tool registries
211
+ * @param streamEvent - The streaming event that was received
212
+ * @param context - Context information about the streaming session
213
+ * @returns Modified stream event after all plugins have processed it
214
+ */
215
+ async runStreamEvent(t, o, e) {
216
+ let r = o;
217
+ for (const s of this.plugins)
218
+ if (s.onStreamEvent)
219
+ try {
220
+ const i = await s.onStreamEvent(t, r, e);
221
+ i && (r = i);
222
+ } catch (i) {
223
+ console.error(`Error in plugin ${s.id} during stream event processing:`, i);
224
+ }
225
+ return r;
226
+ }
186
227
  }
187
228
  const u = {
188
229
  id: "orchestrator",
@@ -219,7 +260,7 @@ const u = {
219
260
  - User: "I need help" → "What specifically would you like help with?" → then route based on response
220
261
  `
221
262
  };
222
- class R {
263
+ class B {
223
264
  /**
224
265
  * Creates a new booth registry with a specified base booth configuration.
225
266
  *
@@ -340,6 +381,24 @@ class R {
340
381
  getBoothById(t) {
341
382
  return this.booths[t];
342
383
  }
384
+ /**
385
+ * Finds and returns multiple booth configurations by their IDs.
386
+ * Throws an error if any booth ID is not found.
387
+ *
388
+ * @param boothIds - Array of unique identifiers of the booths to retrieve
389
+ * @returns Array of booth configurations
390
+ * @throws Error if any booth ID is not registered
391
+ */
392
+ getBoothsByIds(t) {
393
+ const o = [];
394
+ for (const e of t) {
395
+ const r = this.getBoothById(e);
396
+ if (!r)
397
+ throw new Error(`Booth with ID ${e} is not registered.`);
398
+ o.push(r);
399
+ }
400
+ return o;
401
+ }
343
402
  /**
344
403
  * Returns all registered booth configurations.
345
404
  *
@@ -398,18 +457,54 @@ class R {
398
457
  ).length <= 1 && this.hasOrchestrator && this.disableMultiBoothMode();
399
458
  }
400
459
  }
401
- class B {
460
+ class T {
402
461
  /**
403
462
  * Creates an instance of InteractionProcessor.
404
463
  * @param boothRegistry - The registry for booth configurations.
405
464
  * @param boothPlugins - The registry for booth plugins.
406
465
  * @param toolRegistry - The registry for available tools.
407
466
  * @param llmAdapter - The adapter for interacting with the LLM.
467
+ * @param options - Configuration options for streaming and other behaviors.
408
468
  */
409
- constructor(t, o, e, r) {
410
- this.boothRegistry = t, this.boothPlugins = o, this.toolRegistry = e, this.llmAdapter = r;
469
+ constructor(t, o, e, r, s) {
470
+ this.boothRegistry = t, this.boothPlugins = o, this.toolRegistry = e, this.llmAdapter = r, this.options = {
471
+ streaming: !1,
472
+ fallbackToNonStreaming: !0,
473
+ ...s
474
+ };
475
+ }
476
+ /**
477
+ * Generates a consistent ID for responses and messages.
478
+ * @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
479
+ * @returns A unique ID string
480
+ * @private
481
+ */
482
+ generateId(t) {
483
+ return `${t}_${Date.now()}_${R()}`;
484
+ }
485
+ /**
486
+ * Creates a standardized message object for responses.
487
+ * @param text - The text content for the message
488
+ * @returns A formatted message object
489
+ * @private
490
+ */
491
+ createMessage(t) {
492
+ return {
493
+ id: this.generateId("msg"),
494
+ content: [
495
+ {
496
+ type: "output_text",
497
+ text: t,
498
+ annotations: []
499
+ }
500
+ ],
501
+ role: "assistant",
502
+ status: "completed",
503
+ type: "message"
504
+ };
411
505
  }
412
506
  loopLimit = 10;
507
+ options;
413
508
  /**
414
509
  * Creates a synthetic error response with proper structure and error details.
415
510
  * @param error - The error that occurred
@@ -425,7 +520,7 @@ class B {
425
520
  if (r && (s.code = r), o.model === void 0)
426
521
  throw new Error("Model must be specified in response parameters for error handling.");
427
522
  return {
428
- id: `error_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
523
+ id: this.generateId("error"),
429
524
  created_at: Math.floor(Date.now() / 1e3),
430
525
  output_text: "An error occurred while communicating with the language model.",
431
526
  error: s,
@@ -435,19 +530,7 @@ class B {
435
530
  model: o.model,
436
531
  object: "response",
437
532
  output: [
438
- {
439
- id: `msg_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
440
- content: [
441
- {
442
- type: "output_text",
443
- text: `Error: ${e}. Please try again or contact support if the issue persists.`,
444
- annotations: []
445
- }
446
- ],
447
- role: "assistant",
448
- status: "completed",
449
- type: "message"
450
- }
533
+ this.createMessage(`Error: ${e}. Please try again or contact support if the issue persists.`)
451
534
  ],
452
535
  parallel_tool_calls: o.parallel_tool_calls || !1,
453
536
  temperature: o.temperature || null,
@@ -464,6 +547,21 @@ class B {
464
547
  * @private
465
548
  */
466
549
  async callLLM(t) {
550
+ if (this.options.streaming && this.llmAdapter.invokeStream && this.llmAdapter.interpretStream)
551
+ try {
552
+ return await this.callLLMStreaming(t);
553
+ } catch (o) {
554
+ return console.error("Error calling LLM with streaming:", o), this.options.fallbackToNonStreaming ? (console.warn("Falling back to non-streaming mode"), await this.callLLMNonStreaming(t)) : this.createErrorResponse(o, t);
555
+ }
556
+ return await this.callLLMNonStreaming(t);
557
+ }
558
+ /**
559
+ * Calls the LLM in non-streaming mode.
560
+ * @param responseCreateParams - The parameters for creating the response.
561
+ * @returns A promise that resolves with the LLM's response.
562
+ * @private
563
+ */
564
+ async callLLMNonStreaming(t) {
467
565
  try {
468
566
  const o = await this.llmAdapter.invoke(t);
469
567
  return await this.llmAdapter.interpret(o);
@@ -471,6 +569,91 @@ class B {
471
569
  return console.error("Error calling LLM:", o), this.createErrorResponse(o, t);
472
570
  }
473
571
  }
572
+ /**
573
+ * Calls the LLM in streaming mode, accumulating stream events into a complete response.
574
+ * @param responseCreateParams - The parameters for creating the response.
575
+ * @returns A promise that resolves with the accumulated response.
576
+ * @private
577
+ */
578
+ async callLLMStreaming(t) {
579
+ if (!this.llmAdapter.invokeStream || !this.llmAdapter.interpretStream)
580
+ throw new Error("Adapter does not support streaming");
581
+ const o = {
582
+ ...t,
583
+ stream: !0
584
+ }, e = this.llmAdapter.invokeStream(o);
585
+ let r = {
586
+ output: [],
587
+ output_text: ""
588
+ }, s = 0;
589
+ for await (const i of e) {
590
+ const l = await this.llmAdapter.interpretStream(i), h = {
591
+ responseParams: t,
592
+ streamIndex: s,
593
+ accumulatedResponse: r
594
+ }, a = await this.boothPlugins.runStreamEvent(
595
+ {
596
+ toolRegistry: this.toolRegistry,
597
+ boothRegistry: this.boothRegistry,
598
+ pluginRegistry: this.boothPlugins,
599
+ llmAdapter: this.llmAdapter
600
+ },
601
+ l,
602
+ h
603
+ );
604
+ r = this.mergeStreamEvent(r, a), s++;
605
+ }
606
+ return this.finalizeAccumulatedResponse(r, t);
607
+ }
608
+ /**
609
+ * Merges a stream event into the accumulated response.
610
+ * @param accumulated - The current accumulated response.
611
+ * @param streamEvent - The stream event to merge.
612
+ * @returns The updated accumulated response.
613
+ * @private
614
+ */
615
+ mergeStreamEvent(t, o) {
616
+ if (!o || !o.type)
617
+ return t;
618
+ switch (o.type) {
619
+ case "text_delta":
620
+ o.delta && (t.output_text = (t.output_text || "") + o.delta);
621
+ break;
622
+ case "tool_call_start":
623
+ o.toolCall && (t.output = t.output || [], t.output.push(o.toolCall));
624
+ break;
625
+ }
626
+ return t;
627
+ }
628
+ /**
629
+ * Creates a complete Response object from accumulated stream data.
630
+ * @param accumulated - The accumulated response data.
631
+ * @param originalParams - The original request parameters.
632
+ * @returns A complete Response object.
633
+ * @private
634
+ */
635
+ finalizeAccumulatedResponse(t, o) {
636
+ return {
637
+ id: this.generateId("stream"),
638
+ created_at: Math.floor(Date.now() / 1e3),
639
+ output_text: t.output_text || "",
640
+ error: null,
641
+ incomplete_details: null,
642
+ instructions: null,
643
+ metadata: null,
644
+ model: o.model || "unknown",
645
+ object: "response",
646
+ output: t.output || [
647
+ this.createMessage(t.output_text || "")
648
+ ],
649
+ parallel_tool_calls: o.parallel_tool_calls || !1,
650
+ temperature: o.temperature || null,
651
+ tool_choice: o.tool_choice || "auto",
652
+ tools: o.tools || [],
653
+ top_p: o.top_p || null,
654
+ status: "completed"
655
+ };
656
+ }
474
657
  /**
475
658
  * Runs the main interaction loop, sending messages to the LLM and processing
476
659
  * the responses through the registered plugins.
@@ -552,7 +735,7 @@ class B {
552
735
  ), r;
553
736
  }
554
737
  }
555
- const T = {
738
+ const I = {
556
739
  id: "summarizer",
557
740
  role: 'You are a highly skilled summarization AI. Your task is to read a conversation history and provide a concise, neutral, and objective summary. The summary should capture the key points, decisions made, and any unresolved questions. It must be written from a third-person perspective and should be clear enough for another AI assistant to understand the full context and continue the conversation seamlessly without needing the original transcript. Do not add any conversational fluff or introductory phrases like "Here is the summary:".',
558
741
  description: "A specialized booth for summarizing conversation histories."
@@ -608,7 +791,7 @@ ${o}
608
791
  }
609
792
  };
610
793
  }
611
- class v {
794
+ class x {
612
795
  /**
613
796
  * The sessionHistory variable stores the conversation history between the user and the booth system.
614
797
  * It is initialized as an empty array and will be populated with messages exchanged during the interaction.
@@ -704,13 +887,13 @@ class v {
704
887
  async onResponseReceived(t, o, e) {
705
888
  let s = [...o.input, ...e?.output ?? []];
706
889
  if (this.responseContainsBoothChange(e)) {
707
- const i = `Please summarize the following conversation history:
890
+ const l = `Please summarize the following conversation history:
708
891
 
709
- ${JSON.stringify(this.sessionHistory)}`, d = (await A(t.llmAdapter, T).callProcessor.send(i)).output_text, g = s.filter((h) => "role" in h && h.role === "user").pop(), b = {
892
+ ${JSON.stringify(this.sessionHistory)}`, g = (await M(t.llmAdapter, I).callProcessor.send(l)).output_text, d = s.filter((c) => "role" in c && c.role === "user").pop(), b = {
710
893
  role: "developer",
711
- content: `A conversation summary up to this point: ${d}`
712
- }, _ = s.filter((h) => !("role" in h && h.role === "user" || "type" in h && h.type === "message"));
713
- this.sessionHistory = g ? [..._, b, g] : [..._, b], s = this.sessionHistory;
894
+ content: `A conversation summary up to this point: ${g}`
895
+ }, _ = s.filter((c) => !("role" in c && c.role === "user" || "type" in c && c.type === "message"));
896
+ this.sessionHistory = d ? [..._, b, d] : [..._, b], s = this.sessionHistory;
714
897
  } else
715
898
  this.sessionHistory = s;
716
899
  return {
@@ -728,7 +911,7 @@ ${JSON.stringify(this.sessionHistory)}`, d = (await A(t.llmAdapter, T).callProce
728
911
  return !1;
729
912
  }
730
913
  }
731
- class E {
914
+ class S {
732
915
  /**
733
916
  * Unique identifier for this plugin instance.
734
917
  * @private
@@ -774,12 +957,12 @@ class E {
774
957
  const e = t.boothRegistry;
775
958
  let s = e.baseBoothConfig.description;
776
959
  if (e.isMultiBoothMode) {
777
- const l = e.orchestratorBoothConfig, i = e.currentContextBoothConfig;
960
+ const i = e.orchestratorBoothConfig, l = e.currentContextBoothConfig;
778
961
  s += `
779
962
 
780
- ${l.description}`, i.id !== l.id && (s += `
963
+ ${i.description}`, l.id !== i.id && (s += `
781
964
 
782
- ${i.description}`);
965
+ ${l.description}`);
783
966
  }
784
967
  return { ...o, instructions: s };
785
968
  }
@@ -831,6 +1014,22 @@ class w {
831
1014
  throw new Error(`Tool with name ${t} is not registered.`);
832
1015
  return o;
833
1016
  }
1017
+ /**
1018
+ * Finds and returns multiple tools by their names.
1019
+ * Throws an error if any tool name is not found (via getTool method).
1020
+ *
1021
+ * @param toolNames - Array of unique names of the tools to retrieve
1022
+ * @returns Array of tool instances
1023
+ * @throws Error if any tool name is not registered
1024
+ */
1025
+ getToolsByNames(t) {
1026
+ const o = [];
1027
+ for (const e of t) {
1028
+ const r = this.getTool(e);
1029
+ o.push(r);
1030
+ }
1031
+ return o;
1032
+ }
834
1033
  getGlobalTools() {
835
1034
  return Array.from(this.tools.values()).filter((t) => t.global);
836
1035
  }
@@ -873,7 +1072,7 @@ class w {
873
1072
  this.tools.delete(t);
874
1073
  }
875
1074
  }
876
- function C(n) {
1075
+ function v(n) {
877
1076
  switch (n.type) {
878
1077
  case "function":
879
1078
  return `function:${n.name}`;
@@ -896,15 +1095,15 @@ function C(n) {
896
1095
  return `${n.type}:${JSON.stringify(n)}`;
897
1096
  }
898
1097
  }
899
- function I(n) {
1098
+ function C(n) {
900
1099
  const t = /* @__PURE__ */ new Set(), o = [];
901
1100
  for (const e of n) {
902
- const r = C(e);
1101
+ const r = v(e);
903
1102
  t.has(r) || (t.add(r), o.push(e));
904
1103
  }
905
1104
  return o;
906
1105
  }
907
- class x {
1106
+ class E {
908
1107
  description = "A plugin to aggregate and provide tools from base and context booths.";
909
1108
  id = "tool-provider";
910
1109
  name = "Tool Provider Plugin";
@@ -917,18 +1116,18 @@ class x {
917
1116
  * @returns The updated response parameters with the aggregated list of tools.
918
1117
  */
919
1118
  async onBeforeMessageSend(t, o) {
920
- const e = t.boothRegistry.baseBoothConfig, r = t.boothRegistry.currentContextBoothConfig, i = [...e.tools || [], ...r?.tools || []].filter((a, d, g) => g.indexOf(a) === d).map(
1119
+ const e = t.boothRegistry.baseBoothConfig, r = t.boothRegistry.currentContextBoothConfig, l = [...e.tools || [], ...r?.tools || []].filter((a, g, d) => d.indexOf(a) === g).map(
921
1120
  (a) => t.toolRegistry.getTool(a)
922
1121
  );
923
- if (e.mcp && i.push(...e.mcp), r?.mcp && i.push(...r.mcp), t.boothRegistry.isMultiBoothMode) {
1122
+ if (e.mcp && l.push(...e.mcp), r?.mcp && l.push(...r.mcp), t.boothRegistry.isMultiBoothMode) {
924
1123
  const a = y(t.boothRegistry);
925
- i.push(a);
1124
+ l.push(a);
926
1125
  }
927
- i.push(...t.toolRegistry.getGlobalTools());
928
- const c = I(i);
1126
+ l.push(...t.toolRegistry.getGlobalTools());
1127
+ const h = C(l);
929
1128
  return {
930
1129
  ...o,
931
- tools: c
1130
+ tools: h
932
1131
  };
933
1132
  }
934
1133
  /**
@@ -971,16 +1170,16 @@ class m {
971
1170
  call_id: r.call_id,
972
1171
  output: `Error: Tool '${r.name}' does not have an 'execute' method.`
973
1172
  };
974
- const l = await s.execute(JSON.parse(r.arguments)), i = await t.pluginRegistry.runAfterToolCall(
1173
+ const i = await s.execute(JSON.parse(r.arguments)), l = await t.pluginRegistry.runAfterToolCall(
975
1174
  t,
976
1175
  r,
977
- l,
1176
+ i,
978
1177
  e
979
1178
  );
980
1179
  return {
981
1180
  type: "function_call_output",
982
1181
  call_id: r.call_id,
983
- output: JSON.stringify(i)
1182
+ output: JSON.stringify(l)
984
1183
  };
985
1184
  } catch (r) {
986
1185
  console.error(`Error executing tool ${o.name}:`, r);
@@ -1015,22 +1214,22 @@ class m {
1015
1214
  const r = e?.output ?? [], s = m.extractFunctionCalls(r);
1016
1215
  if (!s.length)
1017
1216
  return o;
1018
- const l = [];
1019
- for (let i = 0; i < s.length; i++) {
1020
- const c = s[i];
1021
- if (t.toolRegistry.isLocalTool(c.name))
1217
+ const i = [];
1218
+ for (let l = 0; l < s.length; l++) {
1219
+ const h = s[l];
1220
+ if (t.toolRegistry.isLocalTool(h.name))
1022
1221
  continue;
1023
1222
  const a = {
1024
1223
  responseParams: o,
1025
1224
  response: e,
1026
- toolCallIndex: i,
1225
+ toolCallIndex: l,
1027
1226
  totalToolCalls: s.length
1028
- }, d = await this.executeToolCall(t, c, a);
1029
- l.push(d);
1227
+ }, g = await this.executeToolCall(t, h, a);
1228
+ i.push(g);
1030
1229
  }
1031
1230
  return {
1032
1231
  ...o,
1033
- input: [...o.input, ...l]
1232
+ input: [...o.input, ...i]
1034
1233
  };
1035
1234
  }
1036
1235
  /**
@@ -1042,7 +1241,7 @@ class m {
1042
1241
  return !1;
1043
1242
  }
1044
1243
  }
1045
- class M {
1244
+ class A {
1046
1245
  description = "A plugin to ensure the interaction loop can be finished.";
1047
1246
  id = "finish-turn-plugin";
1048
1247
  name = "Finish Turn Plugin";
@@ -1099,16 +1298,99 @@ class M {
1099
1298
  return o;
1100
1299
  }
1101
1300
  }
1102
- function A(n, t) {
1103
- const o = new R(t), e = new w(), r = new p();
1104
- return new P({
1301
+ class $ {
1302
+ id = "streaming-logger";
1303
+ name = "Streaming Logger Plugin";
1304
+ description = "Logs streaming events in real-time for debugging and monitoring";
1305
+ logPrefix;
1306
+ constructor(t = "[StreamingLogger]") {
1307
+ this.logPrefix = t;
1308
+ }
1309
+ /**
1310
+ * Handle individual stream events as they arrive.
1311
+ * This allows for real-time processing and logging of streaming content.
1312
+ */
1313
+ async onStreamEvent(t, o, e) {
1314
+ switch (o.type) {
1315
+ case "response_start":
1316
+ console.log(`${this.logPrefix} Stream started`);
1317
+ break;
1318
+ case "text_delta":
1319
+ console.log(`${this.logPrefix} Text chunk [${e.streamIndex}]: "${o.delta}"`);
1320
+ break;
1321
+ case "tool_call_start":
1322
+ console.log(`${this.logPrefix} Tool call started: ${o.toolCall?.name}`);
1323
+ break;
1324
+ case "tool_call_end":
1325
+ console.log(`${this.logPrefix} Tool call completed: ${o.toolCall?.name}`);
1326
+ break;
1327
+ case "response_end":
1328
+ console.log(`${this.logPrefix} Stream completed after ${e.streamIndex} events`);
1329
+ break;
1330
+ default:
1331
+ console.log(`${this.logPrefix} Stream event [${e.streamIndex}]: ${o.type}`);
1332
+ }
1333
+ return o;
1334
+ }
1335
+ /**
1336
+ * Required method - determines whether to end the interaction loop.
1337
+ * For a logging plugin, we never want to end the loop ourselves.
1338
+ */
1339
+ async shouldEndInteractionLoop() {
1340
+ return !1;
1341
+ }
1342
+ }
1343
+ class k {
1344
+ id = "streaming-ui";
1345
+ name = "Streaming UI Plugin";
1346
+ description = "Provides real-time UI updates during streaming responses";
1347
+ onStreamCallback;
1348
+ constructor(t) {
1349
+ this.onStreamCallback = t;
1350
+ }
1351
+ /**
1352
+ * Handle individual stream events and emit them to the UI layer.
1353
+ * This enables real-time updates to the user interface.
1354
+ */
1355
+ async onStreamEvent(t, o, e) {
1356
+ this.onStreamCallback && o.type === "text_delta" && this.onStreamCallback(o, e);
1357
+ let r = o;
1358
+ return o.type === "text_delta" && o.delta && (r = {
1359
+ ...o,
1360
+ // Example: add HTML escaping (though this should be done in the UI layer)
1361
+ delta: o.delta
1362
+ }), r;
1363
+ }
1364
+ /**
1365
+ * Set or update the stream callback for UI updates.
1366
+ */
1367
+ setStreamCallback(t) {
1368
+ this.onStreamCallback = t;
1369
+ }
1370
+ /**
1371
+ * Remove the stream callback.
1372
+ */
1373
+ removeStreamCallback() {
1374
+ this.onStreamCallback = void 0;
1375
+ }
1376
+ /**
1377
+ * Required method - determines whether to end the interaction loop.
1378
+ * For a UI plugin, we never want to end the loop ourselves.
1379
+ */
1380
+ async shouldEndInteractionLoop() {
1381
+ return !1;
1382
+ }
1383
+ }
1384
+ function M(n, t) {
1385
+ const o = new B(t), e = new w(), r = new p();
1386
+ return new L({
1105
1387
  llmAdapter: n,
1106
1388
  booths: o,
1107
1389
  tools: e,
1108
1390
  boothPlugins: r
1109
1391
  });
1110
1392
  }
1111
- class P {
1393
+ class L {
1112
1394
  /**
1113
1395
  * Represents a registry that maintains a collection of plugins for a booth system.
1114
1396
  * The boothPluginRegistry is used to manage and access plugins that enhance
@@ -1175,12 +1457,12 @@ class P {
1175
1457
  this.toolRegistry.registerTools([o]);
1176
1458
  }
1177
1459
  this.systemPluginsRegistry = new p(), this.systemPluginsRegistry.registerPlugins([
1178
- new v(t.sessionHistory),
1460
+ new x(t.sessionHistory),
1461
+ new S(),
1179
1462
  new E(),
1180
- new x(),
1181
1463
  new m(),
1182
- new M()
1183
- ]), this.systemPluginsRegistry.registerPlugins(this.boothPluginRegistry.getPlugins()), this.callProcessor = new B(
1464
+ new A()
1465
+ ]), this.systemPluginsRegistry.registerPlugins(this.boothPluginRegistry.getPlugins()), this.callProcessor = new T(
1184
1466
  this.boothRegistry,
1185
1467
  this.systemPluginsRegistry,
1186
1468
  this.toolRegistry,
@@ -1190,15 +1472,17 @@ class P {
1190
1472
  }
1191
1473
  export {
1192
1474
  p as BoothPluginRegistry,
1193
- R as BoothRegistry,
1194
- E as ContextProviderPlugin,
1195
- v as ConversationHistoryPlugin,
1196
- P as CoreBooth,
1197
- M as FinishTurnPlugin,
1198
- B as InteractionProcessor,
1475
+ B as BoothRegistry,
1476
+ S as ContextProviderPlugin,
1477
+ x as ConversationHistoryPlugin,
1478
+ L as CoreBooth,
1479
+ A as FinishTurnPlugin,
1480
+ T as InteractionProcessor,
1481
+ $ as StreamingLoggerPlugin,
1482
+ k as StreamingUIPlugin,
1199
1483
  m as ToolExecutorPlugin,
1200
- x as ToolProviderPlugin,
1484
+ E as ToolProviderPlugin,
1201
1485
  w as ToolRegistry,
1202
- A as createCoreBooth,
1486
+ M as createCoreBooth,
1203
1487
  y as createRouteToBoothTool
1204
1488
  };
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "booths",
3
3
  "private": false,
4
- "version": "1.1.0",
4
+ "version": "1.3.0",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
7
7
  "module": "./dist/index.js",
@@ -16,14 +16,20 @@
16
16
  }
17
17
  },
18
18
  "scripts": {
19
- "build-pack": "npm install --package-lock-only && npm run build && npm pack",
19
+ "build:pack": "npm install --package-lock-only && npm run build && npm pack",
20
20
  "build": "tsc && vite build",
21
21
  "format": "prettier --write \"src/**/*.{js,ts,json,css,scss,md}\"",
22
- "typecheck": "tsc --noEmit"
22
+ "typecheck": "tsc --noEmit",
23
+ "test": "vitest",
24
+ "test:watch": "vitest --watch",
25
+ "test:ui": "vitest --ui",
26
+ "test:coverage": "vitest --coverage"
23
27
  },
24
28
  "devDependencies": {
25
29
  "@eslint/js": "^9.30.1",
26
30
  "@types/node": "^24.0.11",
31
+ "@vitest/coverage-v8": "^3.2.4",
32
+ "@vitest/ui": "^3.2.4",
27
33
  "dotenv": "^17.2.0",
28
34
  "eslint": "^9.30.1",
29
35
  "eslint-config-prettier": "^10.1.5",
@@ -33,9 +39,11 @@
33
39
  "ts-node": "^10.9.2",
34
40
  "typescript": "~5.8.3",
35
41
  "typescript-eslint": "^8.36.0",
42
+ "vi-fetch": "^0.8.0",
36
43
  "vite": "^6.3.5",
37
44
  "vite-plugin-dts": "^4.5.4",
38
- "vite-tsconfig-paths": "^5.1.4"
45
+ "vite-tsconfig-paths": "^5.1.4",
46
+ "vitest": "^3.2.4"
39
47
  },
40
48
  "peerDependencies": {
41
49
  "openai": "^5.8.2"