booths 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -42,7 +42,7 @@ graph TD
42
42
  1. **Application Layer**: Your application integrates the Booths framework to handle conversational AI interactions.
43
43
  2. **`CoreBooth`**: The framework foundation that provides global functionality, instructions, and infrastructure that applies to all booths. It manages the overall system configuration and coordinates the interaction flow.
44
44
  3. **`InteractionProcessor`**: The engine that drives the conversation. It takes user input, runs it through the plugin lifecycle, sends it to the LLM (via the adapter), and processes the response.
45
- 4. **`LLMAdapter`**: A component that handles communication with the specific LLM provider (e.g., OpenAI). It translates requests and responses between the Booths system and the LLM's API.
45
+ 4. **`LLMAdapter`**: A component that handles communication with the specific LLM provider (e.g., OpenAI). It translates requests and responses between the Booths system and the LLM's API. Supports both traditional and streaming response modes.
46
46
  5. **Registries**: These are responsible for managing the different components of the system:
47
47
  * `BoothRegistry`: Manages `BoothConfig` objects that define the behavior of different AI agents.
48
48
  * `ToolRegistry`: Manages the tools (functions) that booths can use.
@@ -124,7 +124,13 @@ The `CoreBooth` requires an `LLMAdapter` to communicate with your chosen languag
124
124
 
125
125
  ```typescript
126
126
  // in OpenAIAdapter.ts
127
- import type { LLMAdapter, ResponseCreateParamsNonStreaming, Response } from 'booths';
127
+ import type {
128
+ LLMAdapter,
129
+ ResponseCreateParamsNonStreaming,
130
+ ResponseCreateParamsStreaming,
131
+ Response,
132
+ StreamEvent
133
+ } from 'booths';
128
134
  import OpenAI from 'openai';
129
135
 
130
136
  export class OpenAIAdapter implements LLMAdapter<Response> {
@@ -141,6 +147,24 @@ export class OpenAIAdapter implements LLMAdapter<Response> {
141
147
  async interpret(response: Response): Promise<Response> {
142
148
  return response;
143
149
  }
150
+
151
+ // Optional: Add streaming support
152
+ async *invokeStream(params: ResponseCreateParamsStreaming): AsyncIterable<Response> {
153
+ const stream = await this.openai.responses.create({ ...params, model: 'gpt-4o', stream: true });
154
+ for await (const chunk of stream) {
155
+ yield chunk;
156
+ }
157
+ }
158
+
159
+ async interpretStream(chunk: Response): Promise<StreamEvent> {
160
+ // Convert OpenAI stream chunks to StreamEvents
161
+ // Implementation depends on your streaming format
162
+ return {
163
+ type: 'text_delta',
164
+ delta: chunk.choices?.[0]?.delta?.content || '',
165
+ content: chunk.choices?.[0]?.delta?.content || ''
166
+ };
167
+ }
144
168
  }
145
169
  ```
146
170
 
@@ -195,6 +219,7 @@ Plugins are classes that implement the `BoothPlugin` interface. They can execute
195
219
  - `onBeforeToolCall`: Before each individual tool call is executed _(allows modification of tool parameters, validation, and logging)_.
196
220
  - `onAfterToolCall`: After each individual tool call is successfully executed _(allows result processing, caching, and transformation)_.
197
221
  - `onToolCallError`: When a tool call encounters an error _(allows custom error handling and recovery)_.
222
+ - `onStreamEvent`: _(Optional)_ During streaming response generation, called for each stream event _(enables real-time processing and UI updates)_.
198
223
  - `shouldEndInteractionLoop`: To determine if the conversation turn is over.
199
224
  - `onAfterInteractionLoopEnd`: After the main loop has finished.
200
225
 
@@ -227,3 +252,245 @@ The `InteractionProcessor` is the engine of the system. It manages the interacti
227
252
  5. Runs the `onResponseReceived` plugin hooks to process the response (e.g., execute tools).
228
253
  6. Repeats this loop until a plugin's `shouldEndInteractionLoop` returns `true`.
229
254
  7. Runs the `onAfter...` plugin hooks for cleanup.
255
+
256
+ ## Streaming Support
257
+
258
+ The Booths framework includes comprehensive streaming support that enables real-time response generation while preserving the full plugin ecosystem and backward compatibility.
259
+
260
+ ### Overview
261
+
262
+ Streaming allows the LLM's response to be processed and displayed in real-time as it's being generated, providing a more responsive user experience. The framework handles streaming at multiple levels:
263
+
264
+ - **Real-time Events**: Stream events are emitted as content arrives
265
+ - **Plugin Integration**: Plugins can hook into streaming events for real-time processing
266
+ - **Complete Responses**: Existing plugins continue to receive complete responses
267
+ - **Automatic Fallback**: Graceful fallback to non-streaming if streaming fails
268
+
269
+ ### Enabling Streaming
270
+
271
+ Streaming can be enabled simply by setting a boolean flag when creating the `InteractionProcessor`:
272
+
273
+ ```typescript
274
+ import { InteractionProcessor, type InteractionProcessorOptions } from 'booths';
275
+
276
+ const options: InteractionProcessorOptions = {
277
+ streaming: true, // Enable streaming
278
+ fallbackToNonStreaming: true // Optional: fallback if streaming fails
279
+ };
280
+
281
+ const processor = new InteractionProcessor(
282
+ boothRegistry,
283
+ pluginRegistry,
284
+ toolRegistry,
285
+ llmAdapter, // Must implement streaming methods
286
+ options
287
+ );
288
+ ```
289
+
290
+ ### Stream Events
291
+
292
+ The streaming system emits different types of events as the response is generated:
293
+
294
+ ```typescript
295
+ export interface StreamEvent {
296
+ type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
297
+ content?: string; // Full content for text events
298
+ delta?: string; // Incremental text for text_delta events
299
+ toolCall?: ResponseFunctionToolCall; // Tool call information
300
+ metadata?: Record<string, unknown>; // Additional event metadata
301
+ }
302
+ ```
303
+
304
+ **Event Types:**
305
+ - `response_start`: Streaming begins
306
+ - `text_delta`: Incremental text content arrives
307
+ - `tool_call_start`: LLM begins a tool call
308
+ - `tool_call_end`: Tool call completes
309
+ - `response_end`: Streaming completes
310
+
311
+ ### Streaming Plugin Hooks
312
+
313
+ Plugins can implement the optional `onStreamEvent` hook to process stream events in real-time:
314
+
315
+ ```typescript
316
+ import type { BoothPlugin, StreamEvent, StreamContext, RepositoryUtilities } from 'booths';
317
+
318
+ export class MyStreamingPlugin implements BoothPlugin {
319
+ id = 'my-streaming-plugin';
320
+ name = 'My Streaming Plugin';
321
+ description = 'Handles streaming events';
322
+
323
+ async onStreamEvent(
324
+ utilities: RepositoryUtilities,
325
+ streamEvent: StreamEvent,
326
+ context: StreamContext
327
+ ): Promise<StreamEvent> {
328
+ // Process the stream event
329
+ if (streamEvent.type === 'text_delta') {
330
+ console.log(`Received text: ${streamEvent.delta}`);
331
+
332
+ // Optionally transform the event
333
+ return {
334
+ ...streamEvent,
335
+ delta: streamEvent.delta?.toUpperCase() // Example transformation
336
+ };
337
+ }
338
+
339
+ return streamEvent; // Pass through unchanged
340
+ }
341
+
342
+ async shouldEndInteractionLoop(): Promise<boolean> {
343
+ return false;
344
+ }
345
+ }
346
+ ```
347
+
348
+ ### Built-in Streaming Plugins
349
+
350
+ The framework includes example streaming plugins:
351
+
352
+ #### StreamingLoggerPlugin
353
+
354
+ Logs streaming events in real-time for debugging and monitoring:
355
+
356
+ ```typescript
357
+ import { StreamingLoggerPlugin } from 'booths';
358
+
359
+ const logger = new StreamingLoggerPlugin('[MyApp]');
360
+ pluginRegistry.registerPlugins([logger]);
361
+ ```
362
+
363
+ #### StreamingUIPlugin
364
+
365
+ Provides real-time UI updates with customizable callbacks:
366
+
367
+ ```typescript
368
+ import { StreamingUIPlugin } from 'booths';
369
+
370
+ const uiPlugin = new StreamingUIPlugin((event, context) => {
371
+ if (event.type === 'text_delta') {
372
+ // Update your UI with the new text
373
+ document.getElementById('response').textContent += event.delta;
374
+ }
375
+ });
376
+
377
+ pluginRegistry.registerPlugins([uiPlugin]);
378
+ ```
379
+
380
+ ### LLM Adapter Streaming Implementation
381
+
382
+ To support streaming, your LLM adapter should implement the optional streaming methods:
383
+
384
+ ```typescript
385
+ export class MyStreamingAdapter implements LLMAdapter<MyResponse> {
386
+ // Required methods
387
+ async invoke(params: ResponseCreateParamsNonStreaming): Promise<MyResponse> {
388
+ // Non-streaming implementation
389
+ }
390
+
391
+ async interpret(response: MyResponse): Promise<Response> {
392
+ // Convert to standard format
393
+ }
394
+
395
+ // Optional streaming methods
396
+ async *invokeStream(params: ResponseCreateParamsStreaming): AsyncIterable<MyResponse> {
397
+ // Yield streaming chunks
398
+ const stream = await this.llm.createStreamingResponse(params);
399
+ for await (const chunk of stream) {
400
+ yield chunk;
401
+ }
402
+ }
403
+
404
+ async interpretStream(chunk: MyResponse): Promise<StreamEvent> {
405
+ // Convert chunk to StreamEvent
406
+ return {
407
+ type: 'text_delta',
408
+ delta: chunk.delta,
409
+ content: chunk.content
410
+ };
411
+ }
412
+ }
413
+ ```
414
+
415
+ ### Stream Context
416
+
417
+ Plugins receive context information about the streaming session:
418
+
419
+ ```typescript
420
+ export interface StreamContext {
421
+ responseParams: ResponseCreateParamsNonStreaming; // Original request
422
+ streamIndex: number; // Event index in stream
423
+ totalExpectedEvents?: number; // Expected total (if known)
424
+ accumulatedResponse: Partial<Response>; // Response built so far
425
+ }
426
+ ```
427
+
428
+ ### Error Handling
429
+
430
+ The streaming system includes robust error handling:
431
+
432
+ - **Plugin Error Isolation**: Errors in streaming plugins don't break the stream
433
+ - **Automatic Fallback**: Can fallback to non-streaming mode on errors
434
+ - **Graceful Degradation**: System continues operating if streaming fails
435
+
436
+ ### Backward Compatibility
437
+
438
+ Streaming support is fully backward compatible:
439
+
440
+ - **Existing Plugins**: Continue to work unchanged
441
+ - **Complete Responses**: Plugins still receive full `Response` objects
442
+ - **Optional Implementation**: Adapters don't require streaming support
443
+ - **Default Behavior**: Non-streaming mode by default
444
+
445
+ ### Example: Complete Streaming Setup
446
+
447
+ Here's a complete example showing streaming integration:
448
+
449
+ ```typescript
450
+ import {
451
+ InteractionProcessor,
452
+ BoothRegistry,
453
+ BoothPluginRegistry,
454
+ ToolRegistry,
455
+ StreamingLoggerPlugin,
456
+ StreamingUIPlugin,
457
+ type InteractionProcessorOptions
458
+ } from 'booths';
459
+
460
+ // 1. Create streaming-enabled adapter (implement streaming methods)
461
+ const streamingAdapter = new MyStreamingLLMAdapter(apiKey);
462
+
463
+ // 2. Set up registries and booth
464
+ const testBooth = { id: 'chat-booth', role: 'Assistant', description: 'Helpful assistant' };
465
+ const boothRegistry = new BoothRegistry(testBooth);
466
+ const pluginRegistry = new BoothPluginRegistry();
467
+ const toolRegistry = new ToolRegistry();
468
+
469
+ // 3. Set up streaming plugins
470
+ const logger = new StreamingLoggerPlugin('[Chat]');
471
+ const uiUpdater = new StreamingUIPlugin((event) => {
472
+ if (event.type === 'text_delta') {
473
+ document.getElementById('chat').textContent += event.delta;
474
+ }
475
+ });
476
+
477
+ pluginRegistry.registerPlugins([logger, uiUpdater]);
478
+
479
+ // 4. Enable streaming
480
+ const streamingOptions: InteractionProcessorOptions = {
481
+ streaming: true,
482
+ fallbackToNonStreaming: true
483
+ };
484
+
485
+ const processor = new InteractionProcessor(
486
+ boothRegistry,
487
+ pluginRegistry,
488
+ toolRegistry,
489
+ streamingAdapter,
490
+ streamingOptions
491
+ );
492
+
493
+ // 5. Send message with real-time streaming
494
+ const response = await processor.send('Hello, stream this response!');
495
+ // User sees content appear in real-time, plugins receive complete response
496
+ ```
package/dist/index.d.ts CHANGED
@@ -150,6 +150,15 @@ export declare interface BoothPlugin {
150
150
  * @returns The potentially modified final response.
151
151
  */
152
152
  onAfterInteractionLoopEnd?: (interactionLoopEndArgs: RepositoryUtilities, response: Response_2) => Promise<Response_2>;
153
+ /**
154
+ * Called for each streaming event as it arrives during response generation.
155
+ * This is optional and only called when streaming is enabled.
156
+ * @param utilities - Utilities for accessing repositories.
157
+ * @param streamEvent - The streaming event that was received.
158
+ * @param context - Context information about the streaming session.
159
+ * @returns The potentially modified stream event, or void to pass through unchanged.
160
+ */
161
+ onStreamEvent?: (utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext) => Promise<StreamEvent | void>;
153
162
  }
154
163
 
155
164
  /**
@@ -293,6 +302,17 @@ export declare class BoothPluginRegistry {
293
302
  * @returns Error result or recovery value after all plugins have processed it
294
303
  */
295
304
  runToolCallError(utilities: RepositoryUtilities, toolCall: ResponseFunctionToolCall, error: Error, context: ToolCallContext): Promise<any>;
305
+ /**
306
+ * Sequentially invokes every plugin's onStreamEvent hook.
307
+ * This is called for each streaming event during response generation,
308
+ * allowing plugins to process or modify stream events in real-time.
309
+ *
310
+ * @param utilities - Context information including booth and tool registries
311
+ * @param streamEvent - The streaming event that was received
312
+ * @param context - Context information about the streaming session
313
+ * @returns Modified stream event after all plugins have processed it
314
+ */
315
+ runStreamEvent(utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
296
316
  }
297
317
 
298
318
  /**
@@ -765,7 +785,22 @@ export declare class InteractionProcessor<T> {
765
785
  private boothPlugins;
766
786
  private toolRegistry;
767
787
  private llmAdapter;
788
+ /**
789
+ * Generates a consistent ID for responses and messages.
790
+ * @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
791
+ * @returns A unique ID string
792
+ * @private
793
+ */
794
+ private generateId;
795
+ /**
796
+ * Creates a standardized message object for responses.
797
+ * @param text - The text content for the message
798
+ * @returns A formatted message object
799
+ * @private
800
+ */
801
+ private createMessage;
768
802
  private loopLimit;
803
+ private options;
769
804
  /**
770
805
  * Creates a synthetic error response with proper structure and error details.
771
806
  * @param error - The error that occurred
@@ -781,6 +816,36 @@ export declare class InteractionProcessor<T> {
781
816
  * @private
782
817
  */
783
818
  private callLLM;
819
+ /**
820
+ * Calls the LLM in non-streaming mode.
821
+ * @param responseCreateParams - The parameters for creating the response.
822
+ * @returns A promise that resolves with the LLM's response.
823
+ * @private
824
+ */
825
+ private callLLMNonStreaming;
826
+ /**
827
+ * Calls the LLM in streaming mode, accumulating stream events into a complete response.
828
+ * @param responseCreateParams - The parameters for creating the response.
829
+ * @returns A promise that resolves with the accumulated response.
830
+ * @private
831
+ */
832
+ private callLLMStreaming;
833
+ /**
834
+ * Merges a stream event into the accumulated response.
835
+ * @param accumulated - The current accumulated response.
836
+ * @param streamEvent - The stream event to merge.
837
+ * @returns The updated accumulated response.
838
+ * @private
839
+ */
840
+ private mergeStreamEvent;
841
+ /**
842
+ * Creates a complete Response object from accumulated stream data.
843
+ * @param accumulated - The accumulated response data.
844
+ * @param originalParams - The original request parameters.
845
+ * @returns A complete Response object.
846
+ * @private
847
+ */
848
+ private finalizeAccumulatedResponse;
784
849
  /**
785
850
  * Runs the main interaction loop, sending messages to the LLM and processing
786
851
  * the responses through the registered plugins.
@@ -795,8 +860,9 @@ export declare class InteractionProcessor<T> {
795
860
  * @param boothPlugins - The registry for booth plugins.
796
861
  * @param toolRegistry - The registry for available tools.
797
862
  * @param llmAdapter - The adapter for interacting with the LLM.
863
+ * @param options - Configuration options for streaming and other behaviors.
798
864
  */
799
- constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>);
865
+ constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>, options?: InteractionProcessorOptions);
800
866
  /**
801
867
  * Sends a message to the LLM and processes the response through the interaction loop.
802
868
  * This involves running pre-loop, pre-send, response-received, and post-loop plugin hooks.
@@ -806,9 +872,23 @@ export declare class InteractionProcessor<T> {
806
872
  send(input: string | ResponseInput): Promise<Response_2>;
807
873
  }
808
874
 
875
+ /**
876
+ * Configuration options for the InteractionProcessor.
877
+ */
878
+ export declare interface InteractionProcessorOptions {
879
+ /** Enable streaming mode for LLM responses */
880
+ streaming?: boolean;
881
+ /** Fallback to non-streaming if streaming fails */
882
+ fallbackToNonStreaming?: boolean;
883
+ }
884
+
809
885
  export declare interface LLMAdapter<LLMResponse = any> {
810
886
  invoke: (responseParams: ResponseCreateParamsNonStreaming) => Promise<LLMResponse>;
811
887
  interpret: (response: LLMResponse) => Promise<Response_2>;
888
+ /** Optional method for streaming LLM responses */
889
+ invokeStream?: (responseParams: ResponseCreateParamsStreaming) => AsyncIterable<LLMResponse>;
890
+ /** Optional method for interpreting individual stream chunks into StreamEvents */
891
+ interpretStream?: (streamChunk: LLMResponse) => Promise<StreamEvent>;
812
892
  }
813
893
 
814
894
  /**
@@ -832,8 +912,24 @@ export declare type RepositoryUtilities = {
832
912
  llmAdapter: LLMAdapter<unknown>;
833
913
  };
834
914
 
915
+ export { Response_2 as Response }
916
+
835
917
  export { ResponseCreateParamsNonStreaming }
836
918
 
919
+ /**
920
+ * Response parameters for streaming requests.
921
+ * This creates a new type that has all the properties of ResponseCreateParamsNonStreaming
922
+ * but with stream: true instead of stream: false.
923
+ */
924
+ export declare type ResponseCreateParamsStreaming = Omit<ResponseCreateParamsNonStreaming, 'stream'> & {
925
+ /** Must be true for streaming requests */
926
+ stream: true;
927
+ };
928
+
929
+ export { ResponseInput }
930
+
931
+ export { ResponseInputItem }
932
+
837
933
  /**
838
934
  * Represents the result of processing a single tool call.
839
935
  */
@@ -854,6 +950,93 @@ export declare type SingleToolProcessingResult = {
854
950
  toolExecuted: boolean;
855
951
  };
856
952
 
953
+ /**
954
+ * Context information provided during streaming event processing.
955
+ */
956
+ export declare interface StreamContext {
957
+ /** The current response parameters being processed */
958
+ responseParams: ResponseCreateParamsNonStreaming;
959
+ /** Index of this stream event in the sequence */
960
+ streamIndex: number;
961
+ /** Total expected number of events (if known) */
962
+ totalExpectedEvents?: number;
963
+ /** Accumulated response content so far */
964
+ accumulatedResponse: Partial<Response_2>;
965
+ }
966
+
967
+ /**
968
+ * Represents a streaming event emitted during LLM response generation.
969
+ */
970
+ export declare interface StreamEvent {
971
+ /** Type of stream event */
972
+ type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
973
+ /** Text content for text_delta events */
974
+ content?: string;
975
+ /** Incremental text delta for text_delta events */
976
+ delta?: string;
977
+ /** Tool call information for tool-related events */
978
+ toolCall?: ResponseFunctionToolCall;
979
+ /** Additional metadata for the event */
980
+ metadata?: Record<string, unknown>;
981
+ }
982
+
983
+ /**
984
+ * Callback function type for handling stream events in the UI.
985
+ */
986
+ export declare type StreamEventCallback = (event: StreamEvent, context: StreamContext) => void;
987
+
988
+ /**
989
+ * Example streaming plugin that logs stream events in real-time.
990
+ * This demonstrates how to implement streaming hooks in plugins.
991
+ */
992
+ export declare class StreamingLoggerPlugin implements BoothPlugin {
993
+ readonly id = "streaming-logger";
994
+ readonly name = "Streaming Logger Plugin";
995
+ readonly description = "Logs streaming events in real-time for debugging and monitoring";
996
+ private logPrefix;
997
+ constructor(logPrefix?: string);
998
+ /**
999
+ * Handle individual stream events as they arrive.
1000
+ * This allows for real-time processing and logging of streaming content.
1001
+ */
1002
+ onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
1003
+ /**
1004
+ * Required method - determines whether to end the interaction loop.
1005
+ * For a logging plugin, we never want to end the loop ourselves.
1006
+ */
1007
+ shouldEndInteractionLoop(): Promise<boolean>;
1008
+ }
1009
+
1010
+ /**
1011
+ * Example streaming plugin that provides real-time UI updates.
1012
+ * This plugin demonstrates how to emit stream events to the UI layer.
1013
+ */
1014
+ export declare class StreamingUIPlugin implements BoothPlugin {
1015
+ readonly id = "streaming-ui";
1016
+ readonly name = "Streaming UI Plugin";
1017
+ readonly description = "Provides real-time UI updates during streaming responses";
1018
+ private onStreamCallback?;
1019
+ constructor(onStreamCallback?: StreamEventCallback);
1020
+ /**
1021
+ * Handle individual stream events and emit them to the UI layer.
1022
+ * This enables real-time updates to the user interface.
1023
+ */
1024
+ onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
1025
+ /**
1026
+ * Set or update the stream callback for UI updates.
1027
+ */
1028
+ setStreamCallback(callback: StreamEventCallback): void;
1029
+ /**
1030
+ * Remove the stream callback.
1031
+ */
1032
+ removeStreamCallback(): void;
1033
+ /**
1034
+ * Required method - determines whether to end the interaction loop.
1035
+ * For a UI plugin, we never want to end the loop ourselves.
1036
+ */
1037
+ shouldEndInteractionLoop(): Promise<boolean>;
1038
+ }
1039
+
857
1040
  /**
858
1041
  * Context information provided during tool call execution.
859
1042
  */
package/dist/index.js CHANGED
@@ -1,3 +1,4 @@
1
+ import { randomUUID as R } from "crypto";
1
2
  class p {
2
3
  /**
3
4
  * Collection of registered plugins.
@@ -180,8 +181,8 @@ class p {
180
181
  */
181
182
  async runAfterToolCall(t, o, e, r) {
182
183
  let s = e;
183
- for (const l of this.plugins)
184
- l.onAfterToolCall && (s = await l.onAfterToolCall(t, o, s, r));
184
+ for (const i of this.plugins)
185
+ i.onAfterToolCall && (s = await i.onAfterToolCall(t, o, s, r));
185
186
  return s;
186
187
  }
187
188
  /**
@@ -197,10 +198,32 @@ class p {
197
198
  */
198
199
  async runToolCallError(t, o, e, r) {
199
200
  let s = `Error: ${e.message}`;
200
- for (const l of this.plugins)
201
- l.onToolCallError && (s = await l.onToolCallError(t, o, e, r));
201
+ for (const i of this.plugins)
202
+ i.onToolCallError && (s = await i.onToolCallError(t, o, e, r));
202
203
  return s;
203
204
  }
205
+ /**
206
+ * Sequentially invokes every plugin's onStreamEvent hook.
207
+ * This is called for each streaming event during response generation,
208
+ * allowing plugins to process or modify stream events in real-time.
209
+ *
210
+ * @param utilities - Context information including booth and tool registries
211
+ * @param streamEvent - The streaming event that was received
212
+ * @param context - Context information about the streaming session
213
+ * @returns Modified stream event after all plugins have processed it
214
+ */
215
+ async runStreamEvent(t, o, e) {
216
+ let r = o;
217
+ for (const s of this.plugins)
218
+ if (s.onStreamEvent)
219
+ try {
220
+ const i = await s.onStreamEvent(t, r, e);
221
+ i && (r = i);
222
+ } catch (i) {
223
+ console.error(`Error in plugin ${s.id} during stream event processing:`, i);
224
+ }
225
+ return r;
226
+ }
204
227
  }
205
228
  const u = {
206
229
  id: "orchestrator",
@@ -237,7 +260,7 @@ const u = {
237
260
  - User: "I need help" → "What specifically would you like help with?" → then route based on response
238
261
  `
239
262
  };
240
- class R {
263
+ class B {
241
264
  /**
242
265
  * Creates a new booth registry with a specified base booth configuration.
243
266
  *
@@ -434,18 +457,54 @@ class R {
434
457
  ).length <= 1 && this.hasOrchestrator && this.disableMultiBoothMode();
435
458
  }
436
459
  }
437
- class B {
460
+ class T {
438
461
  /**
439
462
  * Creates an instance of InteractionProcessor.
440
463
  * @param boothRegistry - The registry for booth configurations.
441
464
  * @param boothPlugins - The registry for booth plugins.
442
465
  * @param toolRegistry - The registry for available tools.
443
466
  * @param llmAdapter - The adapter for interacting with the LLM.
467
+ * @param options - Configuration options for streaming and other behaviors.
444
468
  */
445
- constructor(t, o, e, r) {
446
- this.boothRegistry = t, this.boothPlugins = o, this.toolRegistry = e, this.llmAdapter = r;
469
+ constructor(t, o, e, r, s) {
470
+ this.boothRegistry = t, this.boothPlugins = o, this.toolRegistry = e, this.llmAdapter = r, this.options = {
471
+ streaming: !1,
472
+ fallbackToNonStreaming: !0,
473
+ ...s
474
+ };
475
+ }
476
+ /**
477
+ * Generates a consistent ID for responses and messages.
478
+ * @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
479
+ * @returns A unique ID string
480
+ * @private
481
+ */
482
+ generateId(t) {
483
+ return `${t}_${Date.now()}_${R()}`;
484
+ }
485
+ /**
486
+ * Creates a standardized message object for responses.
487
+ * @param text - The text content for the message
488
+ * @returns A formatted message object
489
+ * @private
490
+ */
491
+ createMessage(t) {
492
+ return {
493
+ id: this.generateId("msg"),
494
+ content: [
495
+ {
496
+ type: "output_text",
497
+ text: t,
498
+ annotations: []
499
+ }
500
+ ],
501
+ role: "assistant",
502
+ status: "completed",
503
+ type: "message"
504
+ };
447
505
  }
448
506
  loopLimit = 10;
507
+ options;
449
508
  /**
450
509
  * Creates a synthetic error response with proper structure and error details.
451
510
  * @param error - The error that occurred
@@ -461,7 +520,7 @@ class B {
461
520
  if (r && (s.code = r), o.model === void 0)
462
521
  throw new Error("Model must be specified in response parameters for error handling.");
463
522
  return {
464
- id: `error_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
523
+ id: this.generateId("error"),
465
524
  created_at: Math.floor(Date.now() / 1e3),
466
525
  output_text: "An error occurred while communicating with the language model.",
467
526
  error: s,
@@ -471,19 +530,7 @@ class B {
471
530
  model: o.model,
472
531
  object: "response",
473
532
  output: [
474
- {
475
- id: `msg_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
476
- content: [
477
- {
478
- type: "output_text",
479
- text: `Error: ${e}. Please try again or contact support if the issue persists.`,
480
- annotations: []
481
- }
482
- ],
483
- role: "assistant",
484
- status: "completed",
485
- type: "message"
486
- }
533
+ this.createMessage(`Error: ${e}. Please try again or contact support if the issue persists.`)
487
534
  ],
488
535
  parallel_tool_calls: o.parallel_tool_calls || !1,
489
536
  temperature: o.temperature || null,
@@ -500,6 +547,21 @@ class B {
500
547
  * @private
501
548
  */
502
549
  async callLLM(t) {
550
+ if (this.options.streaming && this.llmAdapter.invokeStream && this.llmAdapter.interpretStream)
551
+ try {
552
+ return await this.callLLMStreaming(t);
553
+ } catch (o) {
554
+ return console.error("Error calling LLM with streaming:", o), this.options.fallbackToNonStreaming ? (console.warn("Falling back to non-streaming mode"), await this.callLLMNonStreaming(t)) : this.createErrorResponse(o, t);
555
+ }
556
+ return await this.callLLMNonStreaming(t);
557
+ }
558
+ /**
559
+ * Calls the LLM in non-streaming mode.
560
+ * @param responseCreateParams - The parameters for creating the response.
561
+ * @returns A promise that resolves with the LLM's response.
562
+ * @private
563
+ */
564
+ async callLLMNonStreaming(t) {
503
565
  try {
504
566
  const o = await this.llmAdapter.invoke(t);
505
567
  return await this.llmAdapter.interpret(o);
@@ -507,6 +569,91 @@ class B {
507
569
  return console.error("Error calling LLM:", o), this.createErrorResponse(o, t);
508
570
  }
509
571
  }
572
+ /**
573
+ * Calls the LLM in streaming mode, accumulating stream events into a complete response.
574
+ * @param responseCreateParams - The parameters for creating the response.
575
+ * @returns A promise that resolves with the accumulated response.
576
+ * @private
577
+ */
578
+ async callLLMStreaming(t) {
579
+ if (!this.llmAdapter.invokeStream || !this.llmAdapter.interpretStream)
580
+ throw new Error("Adapter does not support streaming");
581
+ const o = {
582
+ ...t,
583
+ stream: !0
584
+ }, e = this.llmAdapter.invokeStream(o);
585
+ let r = {
586
+ output: [],
587
+ output_text: ""
588
+ }, s = 0;
589
+ for await (const i of e) {
590
+ const l = await this.llmAdapter.interpretStream(i), h = {
591
+ responseParams: t,
592
+ streamIndex: s,
593
+ accumulatedResponse: r
594
+ }, a = await this.boothPlugins.runStreamEvent(
595
+ {
596
+ toolRegistry: this.toolRegistry,
597
+ boothRegistry: this.boothRegistry,
598
+ pluginRegistry: this.boothPlugins,
599
+ llmAdapter: this.llmAdapter
600
+ },
601
+ l,
602
+ h
603
+ );
604
+ r = this.mergeStreamEvent(r, a), s++;
605
+ }
606
+ return this.finalizeAccumulatedResponse(r, t);
607
+ }
608
+ /**
609
+ * Merges a stream event into the accumulated response.
610
+ * @param accumulated - The current accumulated response.
611
+ * @param streamEvent - The stream event to merge.
612
+ * @returns The updated accumulated response.
613
+ * @private
614
+ */
615
+ mergeStreamEvent(t, o) {
616
+ if (!o || !o.type)
617
+ return t;
618
+ switch (o.type) {
619
+ case "text_delta":
620
+ o.delta && (t.output_text = (t.output_text || "") + o.delta);
621
+ break;
622
+ case "tool_call_start":
623
+ o.toolCall && (t.output = t.output || [], t.output.push(o.toolCall));
624
+ break;
625
+ }
626
+ return t;
627
+ }
628
+ /**
629
+ * Creates a complete Response object from accumulated stream data.
630
+ * @param accumulated - The accumulated response data.
631
+ * @param originalParams - The original request parameters.
632
+ * @returns A complete Response object.
633
+ * @private
634
+ */
635
+ finalizeAccumulatedResponse(t, o) {
636
+ return {
637
+ id: this.generateId("stream"),
638
+ created_at: Math.floor(Date.now() / 1e3),
639
+ output_text: t.output_text || "",
640
+ error: null,
641
+ incomplete_details: null,
642
+ instructions: null,
643
+ metadata: null,
644
+ model: o.model || "unknown",
645
+ object: "response",
646
+ output: t.output || [
647
+ this.createMessage(t.output_text || "")
648
+ ],
649
+ parallel_tool_calls: o.parallel_tool_calls || !1,
650
+ temperature: o.temperature || null,
651
+ tool_choice: o.tool_choice || "auto",
652
+ tools: o.tools || [],
653
+ top_p: o.top_p || null,
654
+ status: "completed"
655
+ };
656
+ }
510
657
  /**
511
658
  * Runs the main interaction loop, sending messages to the LLM and processing
512
659
  * the responses through the registered plugins.
@@ -588,7 +735,7 @@ class B {
588
735
  ), r;
589
736
  }
590
737
  }
591
- const T = {
738
+ const I = {
592
739
  id: "summarizer",
593
740
  role: 'You are a highly skilled summarization AI. Your task is to read a conversation history and provide a concise, neutral, and objective summary. The summary should capture the key points, decisions made, and any unresolved questions. It must be written from a third-person perspective and should be clear enough for another AI assistant to understand the full context and continue the conversation seamlessly without needing the original transcript. Do not add any conversational fluff or introductory phrases like "Here is the summary:".',
594
741
  description: "A specialized booth for summarizing conversation histories."
@@ -644,7 +791,7 @@ ${o}
644
791
  }
645
792
  };
646
793
  }
647
- class v {
794
+ class x {
648
795
  /**
649
796
  * The sessionHistory variable stores the conversation history between the user and the booth system.
650
797
  * It is initialized as an empty array and will be populated with messages exchanged during the interaction.
@@ -740,13 +887,13 @@ class v {
740
887
  async onResponseReceived(t, o, e) {
741
888
  let s = [...o.input, ...e?.output ?? []];
742
889
  if (this.responseContainsBoothChange(e)) {
743
- const i = `Please summarize the following conversation history:
890
+ const l = `Please summarize the following conversation history:
744
891
 
745
- ${JSON.stringify(this.sessionHistory)}`, d = (await P(t.llmAdapter, T).callProcessor.send(i)).output_text, g = s.filter((h) => "role" in h && h.role === "user").pop(), b = {
892
+ ${JSON.stringify(this.sessionHistory)}`, g = (await M(t.llmAdapter, I).callProcessor.send(l)).output_text, d = s.filter((c) => "role" in c && c.role === "user").pop(), b = {
746
893
  role: "developer",
747
- content: `A conversation summary up to this point: ${d}`
748
- }, w = s.filter((h) => !("role" in h && h.role === "user" || "type" in h && h.type === "message"));
749
- this.sessionHistory = g ? [...w, b, g] : [...w, b], s = this.sessionHistory;
894
+ content: `A conversation summary up to this point: ${g}`
895
+ }, _ = s.filter((c) => !("role" in c && c.role === "user" || "type" in c && c.type === "message"));
896
+ this.sessionHistory = d ? [..._, b, d] : [..._, b], s = this.sessionHistory;
750
897
  } else
751
898
  this.sessionHistory = s;
752
899
  return {
@@ -764,7 +911,7 @@ ${JSON.stringify(this.sessionHistory)}`, d = (await P(t.llmAdapter, T).callProce
764
911
  return !1;
765
912
  }
766
913
  }
767
- class E {
914
+ class S {
768
915
  /**
769
916
  * Unique identifier for this plugin instance.
770
917
  * @private
@@ -810,12 +957,12 @@ class E {
810
957
  const e = t.boothRegistry;
811
958
  let s = e.baseBoothConfig.description;
812
959
  if (e.isMultiBoothMode) {
813
- const l = e.orchestratorBoothConfig, i = e.currentContextBoothConfig;
960
+ const i = e.orchestratorBoothConfig, l = e.currentContextBoothConfig;
814
961
  s += `
815
962
 
816
- ${l.description}`, i.id !== l.id && (s += `
963
+ ${i.description}`, l.id !== i.id && (s += `
817
964
 
818
- ${i.description}`);
965
+ ${l.description}`);
819
966
  }
820
967
  return { ...o, instructions: s };
821
968
  }
@@ -829,7 +976,7 @@ class E {
829
976
  return !1;
830
977
  }
831
978
  }
832
- class _ {
979
+ class w {
833
980
  tools;
834
981
  /**
835
982
  * Initializes an empty Map to store tools.
@@ -925,7 +1072,7 @@ class _ {
925
1072
  this.tools.delete(t);
926
1073
  }
927
1074
  }
928
- function I(n) {
1075
+ function v(n) {
929
1076
  switch (n.type) {
930
1077
  case "function":
931
1078
  return `function:${n.name}`;
@@ -951,12 +1098,12 @@ function I(n) {
951
1098
  function C(n) {
952
1099
  const t = /* @__PURE__ */ new Set(), o = [];
953
1100
  for (const e of n) {
954
- const r = I(e);
1101
+ const r = v(e);
955
1102
  t.has(r) || (t.add(r), o.push(e));
956
1103
  }
957
1104
  return o;
958
1105
  }
959
- class x {
1106
+ class E {
960
1107
  description = "A plugin to aggregate and provide tools from base and context booths.";
961
1108
  id = "tool-provider";
962
1109
  name = "Tool Provider Plugin";
@@ -969,18 +1116,18 @@ class x {
969
1116
  * @returns The updated response parameters with the aggregated list of tools.
970
1117
  */
971
1118
  async onBeforeMessageSend(t, o) {
972
- const e = t.boothRegistry.baseBoothConfig, r = t.boothRegistry.currentContextBoothConfig, i = [...e.tools || [], ...r?.tools || []].filter((a, d, g) => g.indexOf(a) === d).map(
1119
+ const e = t.boothRegistry.baseBoothConfig, r = t.boothRegistry.currentContextBoothConfig, l = [...e.tools || [], ...r?.tools || []].filter((a, g, d) => d.indexOf(a) === g).map(
973
1120
  (a) => t.toolRegistry.getTool(a)
974
1121
  );
975
- if (e.mcp && i.push(...e.mcp), r?.mcp && i.push(...r.mcp), t.boothRegistry.isMultiBoothMode) {
1122
+ if (e.mcp && l.push(...e.mcp), r?.mcp && l.push(...r.mcp), t.boothRegistry.isMultiBoothMode) {
976
1123
  const a = y(t.boothRegistry);
977
- i.push(a);
1124
+ l.push(a);
978
1125
  }
979
- i.push(...t.toolRegistry.getGlobalTools());
980
- const c = C(i);
1126
+ l.push(...t.toolRegistry.getGlobalTools());
1127
+ const h = C(l);
981
1128
  return {
982
1129
  ...o,
983
- tools: c
1130
+ tools: h
984
1131
  };
985
1132
  }
986
1133
  /**
@@ -1023,16 +1170,16 @@ class m {
1023
1170
  call_id: r.call_id,
1024
1171
  output: `Error: Tool '${r.name}' does not have an 'execute' method.`
1025
1172
  };
1026
- const l = await s.execute(JSON.parse(r.arguments)), i = await t.pluginRegistry.runAfterToolCall(
1173
+ const i = await s.execute(JSON.parse(r.arguments)), l = await t.pluginRegistry.runAfterToolCall(
1027
1174
  t,
1028
1175
  r,
1029
- l,
1176
+ i,
1030
1177
  e
1031
1178
  );
1032
1179
  return {
1033
1180
  type: "function_call_output",
1034
1181
  call_id: r.call_id,
1035
- output: JSON.stringify(i)
1182
+ output: JSON.stringify(l)
1036
1183
  };
1037
1184
  } catch (r) {
1038
1185
  console.error(`Error executing tool ${o.name}:`, r);
@@ -1067,22 +1214,22 @@ class m {
1067
1214
  const r = e?.output ?? [], s = m.extractFunctionCalls(r);
1068
1215
  if (!s.length)
1069
1216
  return o;
1070
- const l = [];
1071
- for (let i = 0; i < s.length; i++) {
1072
- const c = s[i];
1073
- if (t.toolRegistry.isLocalTool(c.name))
1217
+ const i = [];
1218
+ for (let l = 0; l < s.length; l++) {
1219
+ const h = s[l];
1220
+ if (t.toolRegistry.isLocalTool(h.name))
1074
1221
  continue;
1075
1222
  const a = {
1076
1223
  responseParams: o,
1077
1224
  response: e,
1078
- toolCallIndex: i,
1225
+ toolCallIndex: l,
1079
1226
  totalToolCalls: s.length
1080
- }, d = await this.executeToolCall(t, c, a);
1081
- l.push(d);
1227
+ }, g = await this.executeToolCall(t, h, a);
1228
+ i.push(g);
1082
1229
  }
1083
1230
  return {
1084
1231
  ...o,
1085
- input: [...o.input, ...l]
1232
+ input: [...o.input, ...i]
1086
1233
  };
1087
1234
  }
1088
1235
  /**
@@ -1094,7 +1241,7 @@ class m {
1094
1241
  return !1;
1095
1242
  }
1096
1243
  }
1097
- class M {
1244
+ class A {
1098
1245
  description = "A plugin to ensure the interaction loop can be finished.";
1099
1246
  id = "finish-turn-plugin";
1100
1247
  name = "Finish Turn Plugin";
@@ -1151,16 +1298,99 @@ class M {
1151
1298
  return o;
1152
1299
  }
1153
1300
  }
1154
- function P(n, t) {
1155
- const o = new R(t), e = new _(), r = new p();
1156
- return new A({
1301
+ class $ {
1302
+ id = "streaming-logger";
1303
+ name = "Streaming Logger Plugin";
1304
+ description = "Logs streaming events in real-time for debugging and monitoring";
1305
+ logPrefix;
1306
+ constructor(t = "[StreamingLogger]") {
1307
+ this.logPrefix = t;
1308
+ }
1309
+ /**
1310
+ * Handle individual stream events as they arrive.
1311
+ * This allows for real-time processing and logging of streaming content.
1312
+ */
1313
+ async onStreamEvent(t, o, e) {
1314
+ switch (o.type) {
1315
+ case "response_start":
1316
+ console.log(`${this.logPrefix} Stream started`);
1317
+ break;
1318
+ case "text_delta":
1319
+ console.log(`${this.logPrefix} Text chunk [${e.streamIndex}]: "${o.delta}"`);
1320
+ break;
1321
+ case "tool_call_start":
1322
+ console.log(`${this.logPrefix} Tool call started: ${o.toolCall?.name}`);
1323
+ break;
1324
+ case "tool_call_end":
1325
+ console.log(`${this.logPrefix} Tool call completed: ${o.toolCall?.name}`);
1326
+ break;
1327
+ case "response_end":
1328
+ console.log(`${this.logPrefix} Stream completed after ${e.streamIndex} events`);
1329
+ break;
1330
+ default:
1331
+ console.log(`${this.logPrefix} Stream event [${e.streamIndex}]: ${o.type}`);
1332
+ }
1333
+ return o;
1334
+ }
1335
+ /**
1336
+ * Required method - determines whether to end the interaction loop.
1337
+ * For a logging plugin, we never want to end the loop ourselves.
1338
+ */
1339
+ async shouldEndInteractionLoop() {
1340
+ return !1;
1341
+ }
1342
+ }
1343
+ class k {
1344
+ id = "streaming-ui";
1345
+ name = "Streaming UI Plugin";
1346
+ description = "Provides real-time UI updates during streaming responses";
1347
+ onStreamCallback;
1348
+ constructor(t) {
1349
+ this.onStreamCallback = t;
1350
+ }
1351
+ /**
1352
+ * Handle individual stream events and emit them to the UI layer.
1353
+ * This enables real-time updates to the user interface.
1354
+ */
1355
+ async onStreamEvent(t, o, e) {
1356
+ this.onStreamCallback && o.type === "text_delta" && this.onStreamCallback(o, e);
1357
+ let r = o;
1358
+ return o.type === "text_delta" && o.delta && (r = {
1359
+ ...o,
1360
+ // Example: add HTML escaping (though this should be done in the UI layer)
1361
+ delta: o.delta
1362
+ }), r;
1363
+ }
1364
+ /**
1365
+ * Set or update the stream callback for UI updates.
1366
+ */
1367
+ setStreamCallback(t) {
1368
+ this.onStreamCallback = t;
1369
+ }
1370
+ /**
1371
+ * Remove the stream callback.
1372
+ */
1373
+ removeStreamCallback() {
1374
+ this.onStreamCallback = void 0;
1375
+ }
1376
+ /**
1377
+ * Required method - determines whether to end the interaction loop.
1378
+ * For a UI plugin, we never want to end the loop ourselves.
1379
+ */
1380
+ async shouldEndInteractionLoop() {
1381
+ return !1;
1382
+ }
1383
+ }
1384
+ function M(n, t) {
1385
+ const o = new B(t), e = new w(), r = new p();
1386
+ return new L({
1157
1387
  llmAdapter: n,
1158
1388
  booths: o,
1159
1389
  tools: e,
1160
1390
  boothPlugins: r
1161
1391
  });
1162
1392
  }
1163
- class A {
1393
+ class L {
1164
1394
  /**
1165
1395
  * Represents a registry that maintains a collection of plugins for a booth system.
1166
1396
  * The boothPluginRegistry is used to manage and access plugins that enhance
@@ -1215,7 +1445,7 @@ class A {
1215
1445
  * @param {ToolRegistry} options.tools - Registry containing tool configurations
1216
1446
  */
1217
1447
  constructor(t) {
1218
- if (this.boothPluginRegistry = t?.boothPlugins ?? new p(), this.boothRegistry = t.booths, this.toolRegistry = t?.tools ?? new _(), this.boothRegistry.setMultiBoothModeCallbacks(
1448
+ if (this.boothPluginRegistry = t?.boothPlugins ?? new p(), this.boothRegistry = t.booths, this.toolRegistry = t?.tools ?? new w(), this.boothRegistry.setMultiBoothModeCallbacks(
1219
1449
  () => {
1220
1450
  const o = y(this.boothRegistry);
1221
1451
  this.toolRegistry.registerTools([o]);
@@ -1227,12 +1457,12 @@ class A {
1227
1457
  this.toolRegistry.registerTools([o]);
1228
1458
  }
1229
1459
  this.systemPluginsRegistry = new p(), this.systemPluginsRegistry.registerPlugins([
1230
- new v(t.sessionHistory),
1460
+ new x(t.sessionHistory),
1461
+ new S(),
1231
1462
  new E(),
1232
- new x(),
1233
1463
  new m(),
1234
- new M()
1235
- ]), this.systemPluginsRegistry.registerPlugins(this.boothPluginRegistry.getPlugins()), this.callProcessor = new B(
1464
+ new A()
1465
+ ]), this.systemPluginsRegistry.registerPlugins(this.boothPluginRegistry.getPlugins()), this.callProcessor = new T(
1236
1466
  this.boothRegistry,
1237
1467
  this.systemPluginsRegistry,
1238
1468
  this.toolRegistry,
@@ -1242,15 +1472,17 @@ class A {
1242
1472
  }
1243
1473
  export {
1244
1474
  p as BoothPluginRegistry,
1245
- R as BoothRegistry,
1246
- E as ContextProviderPlugin,
1247
- v as ConversationHistoryPlugin,
1248
- A as CoreBooth,
1249
- M as FinishTurnPlugin,
1250
- B as InteractionProcessor,
1475
+ B as BoothRegistry,
1476
+ S as ContextProviderPlugin,
1477
+ x as ConversationHistoryPlugin,
1478
+ L as CoreBooth,
1479
+ A as FinishTurnPlugin,
1480
+ T as InteractionProcessor,
1481
+ $ as StreamingLoggerPlugin,
1482
+ k as StreamingUIPlugin,
1251
1483
  m as ToolExecutorPlugin,
1252
- x as ToolProviderPlugin,
1253
- _ as ToolRegistry,
1254
- P as createCoreBooth,
1484
+ E as ToolProviderPlugin,
1485
+ w as ToolRegistry,
1486
+ M as createCoreBooth,
1255
1487
  y as createRouteToBoothTool
1256
1488
  };
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "booths",
3
3
  "private": false,
4
- "version": "1.2.0",
4
+ "version": "1.3.0",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
7
7
  "module": "./dist/index.js",
@@ -19,11 +19,17 @@
19
19
  "build:pack": "npm install --package-lock-only && npm run build && npm pack",
20
20
  "build": "tsc && vite build",
21
21
  "format": "prettier --write \"src/**/*.{js,ts,json,css,scss,md}\"",
22
- "typecheck": "tsc --noEmit"
22
+ "typecheck": "tsc --noEmit",
23
+ "test": "vitest",
24
+ "test:watch": "vitest --watch",
25
+ "test:ui": "vitest --ui",
26
+ "test:coverage": "vitest --coverage"
23
27
  },
24
28
  "devDependencies": {
25
29
  "@eslint/js": "^9.30.1",
26
30
  "@types/node": "^24.0.11",
31
+ "@vitest/coverage-v8": "^3.2.4",
32
+ "@vitest/ui": "^3.2.4",
27
33
  "dotenv": "^17.2.0",
28
34
  "eslint": "^9.30.1",
29
35
  "eslint-config-prettier": "^10.1.5",
@@ -33,9 +39,11 @@
33
39
  "ts-node": "^10.9.2",
34
40
  "typescript": "~5.8.3",
35
41
  "typescript-eslint": "^8.36.0",
42
+ "vi-fetch": "^0.8.0",
36
43
  "vite": "^6.3.5",
37
44
  "vite-plugin-dts": "^4.5.4",
38
- "vite-tsconfig-paths": "^5.1.4"
45
+ "vite-tsconfig-paths": "^5.1.4",
46
+ "vitest": "^3.2.4"
39
47
  },
40
48
  "peerDependencies": {
41
49
  "openai": "^5.8.2"