booths 1.2.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +269 -2
- package/dist/index.d.ts +206 -1
- package/dist/index.js +339 -72
- package/package.json +11 -3
package/README.md
CHANGED
|
@@ -42,7 +42,7 @@ graph TD
|
|
|
42
42
|
1. **Application Layer**: Your application integrates the Booths framework to handle conversational AI interactions.
|
|
43
43
|
2. **`CoreBooth`**: The framework foundation that provides global functionality, instructions, and infrastructure that applies to all booths. It manages the overall system configuration and coordinates the interaction flow.
|
|
44
44
|
3. **`InteractionProcessor`**: The engine that drives the conversation. It takes user input, runs it through the plugin lifecycle, sends it to the LLM (via the adapter), and processes the response.
|
|
45
|
-
4. **`LLMAdapter`**: A component that handles communication with the specific LLM provider (e.g., OpenAI). It translates requests and responses between the Booths system and the LLM's API.
|
|
45
|
+
4. **`LLMAdapter`**: A component that handles communication with the specific LLM provider (e.g., OpenAI). It translates requests and responses between the Booths system and the LLM's API. Supports both traditional and streaming response modes.
|
|
46
46
|
5. **Registries**: These are responsible for managing the different components of the system:
|
|
47
47
|
* `BoothRegistry`: Manages `BoothConfig` objects that define the behavior of different AI agents.
|
|
48
48
|
* `ToolRegistry`: Manages the tools (functions) that booths can use.
|
|
@@ -124,7 +124,13 @@ The `CoreBooth` requires an `LLMAdapter` to communicate with your chosen languag
|
|
|
124
124
|
|
|
125
125
|
```typescript
|
|
126
126
|
// in OpenAIAdapter.ts
|
|
127
|
-
import type {
|
|
127
|
+
import type {
|
|
128
|
+
LLMAdapter,
|
|
129
|
+
ResponseCreateParamsNonStreaming,
|
|
130
|
+
ResponseCreateParamsStreaming,
|
|
131
|
+
Response,
|
|
132
|
+
StreamEvent
|
|
133
|
+
} from 'booths';
|
|
128
134
|
import OpenAI from 'openai';
|
|
129
135
|
|
|
130
136
|
export class OpenAIAdapter implements LLMAdapter<Response> {
|
|
@@ -141,6 +147,24 @@ export class OpenAIAdapter implements LLMAdapter<Response> {
|
|
|
141
147
|
async interpret(response: Response): Promise<Response> {
|
|
142
148
|
return response;
|
|
143
149
|
}
|
|
150
|
+
|
|
151
|
+
// Optional: Add streaming support
|
|
152
|
+
async *invokeStream(params: ResponseCreateParamsStreaming): AsyncIterable<Response> {
|
|
153
|
+
    const stream = await this.openai.responses.create({ model: 'gpt-4o', ...params, stream: true });
|
|
154
|
+
for await (const chunk of stream) {
|
|
155
|
+
yield chunk;
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
async interpretStream(chunk: Response): Promise<StreamEvent> {
|
|
160
|
+
// Convert OpenAI stream chunks to StreamEvents
|
|
161
|
+
    // NOTE: the `choices[0].delta` access below matches the Chat Completions chunk shape;
    // adapt it to the actual chunk format your provider's streaming endpoint emits
|
|
162
|
+
return {
|
|
163
|
+
type: 'text_delta',
|
|
164
|
+
delta: chunk.choices?.[0]?.delta?.content || '',
|
|
165
|
+
content: chunk.choices?.[0]?.delta?.content || ''
|
|
166
|
+
};
|
|
167
|
+
}
|
|
144
168
|
}
|
|
145
169
|
```
|
|
146
170
|
|
|
@@ -195,6 +219,7 @@ Plugins are classes that implement the `BoothPlugin` interface. They can execute
|
|
|
195
219
|
- `onBeforeToolCall`: Before each individual tool call is executed _(allows modification of tool parameters, validation, and logging)_.
|
|
196
220
|
- `onAfterToolCall`: After each individual tool call is successfully executed _(allows result processing, caching, and transformation)_.
|
|
197
221
|
- `onToolCallError`: When a tool call encounters an error _(allows custom error handling and recovery)_.
|
|
222
|
+
- `onStreamEvent`: _(Optional)_ During streaming response generation, called for each stream event _(enables real-time processing and UI updates)_.
|
|
198
223
|
- `shouldEndInteractionLoop`: To determine if the conversation turn is over.
|
|
199
224
|
- `onAfterInteractionLoopEnd`: After the main loop has finished.
|
|
200
225
|
|
|
@@ -227,3 +252,245 @@ The `InteractionProcessor` is the engine of the system. It manages the interacti
|
|
|
227
252
|
5. Runs the `onResponseReceived` plugin hooks to process the response (e.g., execute tools).
|
|
228
253
|
6. Repeats this loop until a plugin's `shouldEndInteractionLoop` returns `true`.
|
|
229
254
|
7. Runs the `onAfter...` plugin hooks for cleanup.
|
|
255
|
+
|
|
256
|
+
## Streaming Support
|
|
257
|
+
|
|
258
|
+
The Booths framework includes comprehensive streaming support that enables real-time response generation while preserving the full plugin ecosystem and backward compatibility.
|
|
259
|
+
|
|
260
|
+
### Overview
|
|
261
|
+
|
|
262
|
+
Streaming allows the LLM's response to be processed and displayed in real-time as it's being generated, providing a more responsive user experience. The framework handles streaming at multiple levels:
|
|
263
|
+
|
|
264
|
+
- **Real-time Events**: Stream events are emitted as content arrives
|
|
265
|
+
- **Plugin Integration**: Plugins can hook into streaming events for real-time processing
|
|
266
|
+
- **Complete Responses**: Existing plugins continue to receive complete responses
|
|
267
|
+
- **Automatic Fallback**: Graceful fallback to non-streaming if streaming fails
|
|
268
|
+
|
|
269
|
+
### Enabling Streaming
|
|
270
|
+
|
|
271
|
+
Streaming can be enabled simply by setting a boolean flag when creating the `InteractionProcessor`:
|
|
272
|
+
|
|
273
|
+
```typescript
|
|
274
|
+
import { InteractionProcessor, type InteractionProcessorOptions } from 'booths';
|
|
275
|
+
|
|
276
|
+
const options: InteractionProcessorOptions = {
|
|
277
|
+
streaming: true, // Enable streaming
|
|
278
|
+
fallbackToNonStreaming: true // Optional: fallback if streaming fails
|
|
279
|
+
};
|
|
280
|
+
|
|
281
|
+
const processor = new InteractionProcessor(
|
|
282
|
+
boothRegistry,
|
|
283
|
+
pluginRegistry,
|
|
284
|
+
toolRegistry,
|
|
285
|
+
llmAdapter, // Must implement streaming methods
|
|
286
|
+
options
|
|
287
|
+
);
|
|
288
|
+
```
|
|
289
|
+
|
|
290
|
+
### Stream Events
|
|
291
|
+
|
|
292
|
+
The streaming system emits different types of events as the response is generated:
|
|
293
|
+
|
|
294
|
+
```typescript
|
|
295
|
+
export interface StreamEvent {
|
|
296
|
+
type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
|
|
297
|
+
content?: string; // Full content for text events
|
|
298
|
+
delta?: string; // Incremental text for text_delta events
|
|
299
|
+
toolCall?: object; // Tool call information
|
|
300
|
+
metadata?: any; // Additional event metadata
|
|
301
|
+
}
|
|
302
|
+
```
|
|
303
|
+
|
|
304
|
+
**Event Types:**
|
|
305
|
+
- `response_start`: Streaming begins
|
|
306
|
+
- `text_delta`: Incremental text content arrives
|
|
307
|
+
- `tool_call_start`: LLM begins a tool call
|
|
308
|
+
- `tool_call_end`: Tool call completes
|
|
309
|
+
- `response_end`: Streaming completes
|
|
310
|
+
|
|
311
|
+
### Streaming Plugin Hooks
|
|
312
|
+
|
|
313
|
+
Plugins can implement the optional `onStreamEvent` hook to process stream events in real-time:
|
|
314
|
+
|
|
315
|
+
```typescript
|
|
316
|
+
import type { BoothPlugin, StreamEvent, StreamContext, RepositoryUtilities } from 'booths';
|
|
317
|
+
|
|
318
|
+
export class MyStreamingPlugin implements BoothPlugin {
|
|
319
|
+
id = 'my-streaming-plugin';
|
|
320
|
+
name = 'My Streaming Plugin';
|
|
321
|
+
description = 'Handles streaming events';
|
|
322
|
+
|
|
323
|
+
async onStreamEvent(
|
|
324
|
+
utilities: RepositoryUtilities,
|
|
325
|
+
streamEvent: StreamEvent,
|
|
326
|
+
context: StreamContext
|
|
327
|
+
): Promise<StreamEvent> {
|
|
328
|
+
// Process the stream event
|
|
329
|
+
if (streamEvent.type === 'text_delta') {
|
|
330
|
+
console.log(`Received text: ${streamEvent.delta}`);
|
|
331
|
+
|
|
332
|
+
// Optionally transform the event
|
|
333
|
+
return {
|
|
334
|
+
...streamEvent,
|
|
335
|
+
delta: streamEvent.delta?.toUpperCase() // Example transformation
|
|
336
|
+
};
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
return streamEvent; // Pass through unchanged
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
async shouldEndInteractionLoop(): Promise<boolean> {
|
|
343
|
+
return false;
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
```
|
|
347
|
+
|
|
348
|
+
### Built-in Streaming Plugins
|
|
349
|
+
|
|
350
|
+
The framework includes example streaming plugins:
|
|
351
|
+
|
|
352
|
+
#### StreamingLoggerPlugin
|
|
353
|
+
|
|
354
|
+
Logs streaming events in real-time for debugging and monitoring:
|
|
355
|
+
|
|
356
|
+
```typescript
|
|
357
|
+
import { StreamingLoggerPlugin } from 'booths';
|
|
358
|
+
|
|
359
|
+
const logger = new StreamingLoggerPlugin('[MyApp]');
|
|
360
|
+
pluginRegistry.registerPlugins([logger]);
|
|
361
|
+
```
|
|
362
|
+
|
|
363
|
+
#### StreamingUIPlugin
|
|
364
|
+
|
|
365
|
+
Provides real-time UI updates with customizable callbacks:
|
|
366
|
+
|
|
367
|
+
```typescript
|
|
368
|
+
import { StreamingUIPlugin } from 'booths';
|
|
369
|
+
|
|
370
|
+
const uiPlugin = new StreamingUIPlugin((event, context) => {
|
|
371
|
+
if (event.type === 'text_delta') {
|
|
372
|
+
// Update your UI with the new text
|
|
373
|
+
    document.getElementById('response')!.textContent += event.delta ?? '';
|
|
374
|
+
}
|
|
375
|
+
});
|
|
376
|
+
|
|
377
|
+
pluginRegistry.registerPlugins([uiPlugin]);
|
|
378
|
+
```
|
|
379
|
+
|
|
380
|
+
### LLM Adapter Streaming Implementation
|
|
381
|
+
|
|
382
|
+
To support streaming, your LLM adapter should implement the optional streaming methods:
|
|
383
|
+
|
|
384
|
+
```typescript
|
|
385
|
+
export class MyStreamingAdapter implements LLMAdapter<MyResponse> {
|
|
386
|
+
// Required methods
|
|
387
|
+
async invoke(params: ResponseCreateParamsNonStreaming): Promise<MyResponse> {
|
|
388
|
+
// Non-streaming implementation
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
async interpret(response: MyResponse): Promise<Response> {
|
|
392
|
+
// Convert to standard format
|
|
393
|
+
}
|
|
394
|
+
|
|
395
|
+
// Optional streaming methods
|
|
396
|
+
async *invokeStream(params: ResponseCreateParamsStreaming): AsyncIterable<MyResponse> {
|
|
397
|
+
// Yield streaming chunks
|
|
398
|
+
const stream = await this.llm.createStreamingResponse(params);
|
|
399
|
+
for await (const chunk of stream) {
|
|
400
|
+
yield chunk;
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
async interpretStream(chunk: MyResponse): Promise<StreamEvent> {
|
|
405
|
+
// Convert chunk to StreamEvent
|
|
406
|
+
return {
|
|
407
|
+
type: 'text_delta',
|
|
408
|
+
delta: chunk.delta,
|
|
409
|
+
content: chunk.content
|
|
410
|
+
};
|
|
411
|
+
}
|
|
412
|
+
}
|
|
413
|
+
```
|
|
414
|
+
|
|
415
|
+
### Stream Context
|
|
416
|
+
|
|
417
|
+
Plugins receive context information about the streaming session:
|
|
418
|
+
|
|
419
|
+
```typescript
|
|
420
|
+
export interface StreamContext {
|
|
421
|
+
responseParams: ResponseCreateParamsNonStreaming; // Original request
|
|
422
|
+
streamIndex: number; // Event index in stream
|
|
423
|
+
totalExpectedEvents?: number; // Expected total (if known)
|
|
424
|
+
accumulatedResponse: Partial<Response>; // Response built so far
|
|
425
|
+
}
|
|
426
|
+
```
|
|
427
|
+
|
|
428
|
+
### Error Handling
|
|
429
|
+
|
|
430
|
+
The streaming system includes robust error handling:
|
|
431
|
+
|
|
432
|
+
- **Plugin Error Isolation**: Errors in streaming plugins don't break the stream
|
|
433
|
+
- **Automatic Fallback**: Can fallback to non-streaming mode on errors
|
|
434
|
+
- **Graceful Degradation**: System continues operating if streaming fails
|
|
435
|
+
|
|
436
|
+
### Backward Compatibility
|
|
437
|
+
|
|
438
|
+
Streaming support is fully backward compatible:
|
|
439
|
+
|
|
440
|
+
- **Existing Plugins**: Continue to work unchanged
|
|
441
|
+
- **Complete Responses**: Plugins still receive full `Response` objects
|
|
442
|
+
- **Optional Implementation**: Adapters don't require streaming support
|
|
443
|
+
- **Default Behavior**: Non-streaming mode by default
|
|
444
|
+
|
|
445
|
+
### Example: Complete Streaming Setup
|
|
446
|
+
|
|
447
|
+
Here's a complete example showing streaming integration:
|
|
448
|
+
|
|
449
|
+
```typescript
|
|
450
|
+
import {
|
|
451
|
+
InteractionProcessor,
|
|
452
|
+
BoothRegistry,
|
|
453
|
+
BoothPluginRegistry,
|
|
454
|
+
ToolRegistry,
|
|
455
|
+
StreamingLoggerPlugin,
|
|
456
|
+
StreamingUIPlugin,
|
|
457
|
+
type InteractionProcessorOptions
|
|
458
|
+
} from 'booths';
|
|
459
|
+
|
|
460
|
+
// 1. Create streaming-enabled adapter (implement streaming methods)
|
|
461
|
+
const streamingAdapter = new MyStreamingLLMAdapter(apiKey);
|
|
462
|
+
|
|
463
|
+
// 2. Set up registries and booth
|
|
464
|
+
const testBooth = { id: 'chat-booth', role: 'Assistant', description: 'Helpful assistant' };
|
|
465
|
+
const boothRegistry = new BoothRegistry(testBooth);
|
|
466
|
+
const pluginRegistry = new BoothPluginRegistry();
|
|
467
|
+
const toolRegistry = new ToolRegistry();
|
|
468
|
+
|
|
469
|
+
// 3. Set up streaming plugins
|
|
470
|
+
const logger = new StreamingLoggerPlugin('[Chat]');
|
|
471
|
+
const uiUpdater = new StreamingUIPlugin((event) => {
|
|
472
|
+
if (event.type === 'text_delta') {
|
|
473
|
+
    document.getElementById('chat')!.textContent += event.delta ?? '';
|
|
474
|
+
}
|
|
475
|
+
});
|
|
476
|
+
|
|
477
|
+
pluginRegistry.registerPlugins([logger, uiUpdater]);
|
|
478
|
+
|
|
479
|
+
// 4. Enable streaming
|
|
480
|
+
const streamingOptions: InteractionProcessorOptions = {
|
|
481
|
+
streaming: true,
|
|
482
|
+
fallbackToNonStreaming: true
|
|
483
|
+
};
|
|
484
|
+
|
|
485
|
+
const processor = new InteractionProcessor(
|
|
486
|
+
boothRegistry,
|
|
487
|
+
pluginRegistry,
|
|
488
|
+
toolRegistry,
|
|
489
|
+
streamingAdapter,
|
|
490
|
+
streamingOptions
|
|
491
|
+
);
|
|
492
|
+
|
|
493
|
+
// 5. Send message with real-time streaming
|
|
494
|
+
const response = await processor.send('Hello, stream this response!');
|
|
495
|
+
// User sees content appear in real-time, plugins receive complete response
|
|
496
|
+
```
|
package/dist/index.d.ts
CHANGED
|
@@ -150,6 +150,15 @@ export declare interface BoothPlugin {
|
|
|
150
150
|
* @returns The potentially modified final response.
|
|
151
151
|
*/
|
|
152
152
|
onAfterInteractionLoopEnd?: (interactionLoopEndArgs: RepositoryUtilities, response: Response_2) => Promise<Response_2>;
|
|
153
|
+
/**
|
|
154
|
+
* Called for each streaming event as it arrives during response generation.
|
|
155
|
+
* This is optional and only called when streaming is enabled.
|
|
156
|
+
* @param utilities - Utilities for accessing repositories.
|
|
157
|
+
* @param streamEvent - The streaming event that was received.
|
|
158
|
+
* @param context - Context information about the streaming session.
|
|
159
|
+
* @returns The potentially modified stream event, or void to pass through unchanged.
|
|
160
|
+
*/
|
|
161
|
+
onStreamEvent?: (utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext) => Promise<StreamEvent | void>;
|
|
153
162
|
}
|
|
154
163
|
|
|
155
164
|
/**
|
|
@@ -293,6 +302,17 @@ export declare class BoothPluginRegistry {
|
|
|
293
302
|
* @returns Error result or recovery value after all plugins have processed it
|
|
294
303
|
*/
|
|
295
304
|
runToolCallError(utilities: RepositoryUtilities, toolCall: ResponseFunctionToolCall, error: Error, context: ToolCallContext): Promise<any>;
|
|
305
|
+
/**
|
|
306
|
+
* Sequentially invokes every plugin's onStreamEvent hook.
|
|
307
|
+
* This is called for each streaming event during response generation,
|
|
308
|
+
* allowing plugins to process or modify stream events in real-time.
|
|
309
|
+
*
|
|
310
|
+
* @param utilities - Context information including booth and tool registries
|
|
311
|
+
* @param streamEvent - The streaming event that was received
|
|
312
|
+
* @param context - Context information about the streaming session
|
|
313
|
+
* @returns Modified stream event after all plugins have processed it
|
|
314
|
+
*/
|
|
315
|
+
runStreamEvent(utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
|
|
296
316
|
}
|
|
297
317
|
|
|
298
318
|
/**
|
|
@@ -765,7 +785,22 @@ export declare class InteractionProcessor<T> {
|
|
|
765
785
|
private boothPlugins;
|
|
766
786
|
private toolRegistry;
|
|
767
787
|
private llmAdapter;
|
|
788
|
+
/**
|
|
789
|
+
* Generates a consistent ID for responses and messages.
|
|
790
|
+
* @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
|
|
791
|
+
* @returns A unique ID string
|
|
792
|
+
* @private
|
|
793
|
+
*/
|
|
794
|
+
private generateId;
|
|
795
|
+
/**
|
|
796
|
+
* Creates a standardized message object for responses.
|
|
797
|
+
* @param text - The text content for the message
|
|
798
|
+
* @returns A formatted message object
|
|
799
|
+
* @private
|
|
800
|
+
*/
|
|
801
|
+
private createMessage;
|
|
768
802
|
private loopLimit;
|
|
803
|
+
private options;
|
|
769
804
|
/**
|
|
770
805
|
* Creates a synthetic error response with proper structure and error details.
|
|
771
806
|
* @param error - The error that occurred
|
|
@@ -781,6 +816,58 @@ export declare class InteractionProcessor<T> {
|
|
|
781
816
|
* @private
|
|
782
817
|
*/
|
|
783
818
|
private callLLM;
|
|
819
|
+
/**
|
|
820
|
+
* Calls the LLM in non-streaming mode.
|
|
821
|
+
* @param responseCreateParams - The parameters for creating the response.
|
|
822
|
+
* @returns A promise that resolves with the LLM's response.
|
|
823
|
+
* @private
|
|
824
|
+
*/
|
|
825
|
+
private callLLMNonStreaming;
|
|
826
|
+
/**
|
|
827
|
+
* Calls the LLM in streaming mode, accumulating stream events into a complete response.
|
|
828
|
+
* @param responseCreateParams - The parameters for creating the response.
|
|
829
|
+
* @returns A promise that resolves with the accumulated response.
|
|
830
|
+
* @private
|
|
831
|
+
*/
|
|
832
|
+
private callLLMStreaming;
|
|
833
|
+
/**
|
|
834
|
+
* Type guard to check if an output item has a function property (tool call).
|
|
835
|
+
* @param output - The output item to check
|
|
836
|
+
* @returns True if the output item is a tool call with a function
|
|
837
|
+
* @private
|
|
838
|
+
*/
|
|
839
|
+
private isToolCall;
|
|
840
|
+
/**
|
|
841
|
+
* Helper function to safely update tool call arguments.
|
|
842
|
+
* @param accumulated - The accumulated response
|
|
843
|
+
* @param callId - The tool call ID to update
|
|
844
|
+
* @param delta - The argument delta to append
|
|
845
|
+
* @private
|
|
846
|
+
*/
|
|
847
|
+
private updateToolCallArguments;
|
|
848
|
+
/**
|
|
849
|
+
* Helper function to safely update a tool call with final data.
|
|
850
|
+
* @param accumulated - The accumulated response
|
|
851
|
+
* @param toolCallData - The final tool call data
|
|
852
|
+
* @private
|
|
853
|
+
*/
|
|
854
|
+
private updateToolCallFunction;
|
|
855
|
+
/**
|
|
856
|
+
* Merges a stream event into the accumulated response.
|
|
857
|
+
* @param accumulated - The current accumulated response.
|
|
858
|
+
* @param streamEvent - The stream event to merge.
|
|
859
|
+
* @returns The updated accumulated response.
|
|
860
|
+
* @private
|
|
861
|
+
*/
|
|
862
|
+
private mergeStreamEvent;
|
|
863
|
+
/**
|
|
864
|
+
* Creates a complete Response object from accumulated stream data.
|
|
865
|
+
* @param accumulated - The accumulated response data.
|
|
866
|
+
* @param originalParams - The original request parameters.
|
|
867
|
+
* @returns A complete Response object.
|
|
868
|
+
* @private
|
|
869
|
+
*/
|
|
870
|
+
private finalizeAccumulatedResponse;
|
|
784
871
|
/**
|
|
785
872
|
* Runs the main interaction loop, sending messages to the LLM and processing
|
|
786
873
|
* the responses through the registered plugins.
|
|
@@ -795,8 +882,9 @@ export declare class InteractionProcessor<T> {
|
|
|
795
882
|
* @param boothPlugins - The registry for booth plugins.
|
|
796
883
|
* @param toolRegistry - The registry for available tools.
|
|
797
884
|
* @param llmAdapter - The adapter for interacting with the LLM.
|
|
885
|
+
* @param options - Configuration options for streaming and other behaviors.
|
|
798
886
|
*/
|
|
799
|
-
constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T
|
|
887
|
+
constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>, options?: InteractionProcessorOptions);
|
|
800
888
|
/**
|
|
801
889
|
* Sends a message to the LLM and processes the response through the interaction loop.
|
|
802
890
|
* This involves running pre-loop, pre-send, response-received, and post-loop plugin hooks.
|
|
@@ -806,9 +894,23 @@ export declare class InteractionProcessor<T> {
|
|
|
806
894
|
send(input: string | ResponseInput): Promise<Response_2>;
|
|
807
895
|
}
|
|
808
896
|
|
|
897
|
+
/**
|
|
898
|
+
* Configuration options for the InteractionProcessor.
|
|
899
|
+
*/
|
|
900
|
+
export declare interface InteractionProcessorOptions {
|
|
901
|
+
/** Enable streaming mode for LLM responses */
|
|
902
|
+
streaming?: boolean;
|
|
903
|
+
/** Fallback to non-streaming if streaming fails */
|
|
904
|
+
fallbackToNonStreaming?: boolean;
|
|
905
|
+
}
|
|
906
|
+
|
|
809
907
|
export declare interface LLMAdapter<LLMResponse = any> {
|
|
810
908
|
invoke: (responseParams: ResponseCreateParamsNonStreaming) => Promise<LLMResponse>;
|
|
811
909
|
interpret: (response: LLMResponse) => Promise<Response_2>;
|
|
910
|
+
/** Optional method for streaming LLM responses */
|
|
911
|
+
invokeStream?: (responseParams: ResponseCreateParamsStreaming) => AsyncIterable<LLMResponse>;
|
|
912
|
+
/** Optional method for interpreting individual stream chunks into StreamEvents */
|
|
913
|
+
interpretStream?: (streamChunk: LLMResponse) => Promise<StreamEvent>;
|
|
812
914
|
}
|
|
813
915
|
|
|
814
916
|
/**
|
|
@@ -832,8 +934,24 @@ export declare type RepositoryUtilities = {
|
|
|
832
934
|
llmAdapter: LLMAdapter<unknown>;
|
|
833
935
|
};
|
|
834
936
|
|
|
937
|
+
export { Response_2 as Response }
|
|
938
|
+
|
|
835
939
|
export { ResponseCreateParamsNonStreaming }
|
|
836
940
|
|
|
941
|
+
/**
|
|
942
|
+
* Response parameters for streaming requests.
|
|
943
|
+
* This creates a new type that has all the properties of ResponseCreateParamsNonStreaming
|
|
944
|
+
* but with stream: true instead of stream: false.
|
|
945
|
+
*/
|
|
946
|
+
export declare type ResponseCreateParamsStreaming = Omit<ResponseCreateParamsNonStreaming, 'stream'> & {
|
|
947
|
+
/** Must be true for streaming requests */
|
|
948
|
+
stream: true;
|
|
949
|
+
};
|
|
950
|
+
|
|
951
|
+
export { ResponseInput }
|
|
952
|
+
|
|
953
|
+
export { ResponseInputItem }
|
|
954
|
+
|
|
837
955
|
/**
|
|
838
956
|
* Represents the result of processing a single tool call.
|
|
839
957
|
*/
|
|
@@ -854,6 +972,93 @@ export declare type SingleToolProcessingResult = {
|
|
|
854
972
|
toolExecuted: boolean;
|
|
855
973
|
};
|
|
856
974
|
|
|
975
|
+
/**
|
|
976
|
+
* Context information provided during streaming event processing.
|
|
977
|
+
*/
|
|
978
|
+
export declare interface StreamContext {
|
|
979
|
+
/** The current response parameters being processed */
|
|
980
|
+
responseParams: ResponseCreateParamsNonStreaming;
|
|
981
|
+
/** Index of this stream event in the sequence */
|
|
982
|
+
streamIndex: number;
|
|
983
|
+
/** Total expected number of events (if known) */
|
|
984
|
+
totalExpectedEvents?: number;
|
|
985
|
+
/** Accumulated response content so far */
|
|
986
|
+
accumulatedResponse: Partial<Response_2>;
|
|
987
|
+
}
|
|
988
|
+
|
|
989
|
+
/**
|
|
990
|
+
* Represents a streaming event emitted during LLM response generation.
|
|
991
|
+
*/
|
|
992
|
+
export declare interface StreamEvent {
|
|
993
|
+
/** Type of stream event */
|
|
994
|
+
type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
|
|
995
|
+
/** Text content for text_delta events */
|
|
996
|
+
content?: string;
|
|
997
|
+
/** Incremental text delta for text_delta events */
|
|
998
|
+
delta?: string;
|
|
999
|
+
/** Tool call information for tool-related events */
|
|
1000
|
+
toolCall?: ResponseFunctionToolCall;
|
|
1001
|
+
/** Additional metadata for the event */
|
|
1002
|
+
metadata?: Record<string, unknown>;
|
|
1003
|
+
}
|
|
1004
|
+
|
|
1005
|
+
/**
|
|
1006
|
+
* Callback function type for handling stream events in the UI.
|
|
1007
|
+
*/
|
|
1008
|
+
export declare type StreamEventCallback = (event: StreamEvent, context: StreamContext) => void;
|
|
1009
|
+
|
|
1010
|
+
/**
|
|
1011
|
+
* Example streaming plugin that logs stream events in real-time.
|
|
1012
|
+
* This demonstrates how to implement streaming hooks in plugins.
|
|
1013
|
+
*/
|
|
1014
|
+
export declare class StreamingLoggerPlugin implements BoothPlugin {
|
|
1015
|
+
readonly id = "streaming-logger";
|
|
1016
|
+
readonly name = "Streaming Logger Plugin";
|
|
1017
|
+
readonly description = "Logs streaming events in real-time for debugging and monitoring";
|
|
1018
|
+
private logPrefix;
|
|
1019
|
+
constructor(logPrefix?: string);
|
|
1020
|
+
/**
|
|
1021
|
+
* Handle individual stream events as they arrive.
|
|
1022
|
+
* This allows for real-time processing and logging of streaming content.
|
|
1023
|
+
*/
|
|
1024
|
+
onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
|
|
1025
|
+
/**
|
|
1026
|
+
* Required method - determines whether to end the interaction loop.
|
|
1027
|
+
* For a logging plugin, we never want to end the loop ourselves.
|
|
1028
|
+
*/
|
|
1029
|
+
shouldEndInteractionLoop(): Promise<boolean>;
|
|
1030
|
+
}
|
|
1031
|
+
|
|
1032
|
+
/**
|
|
1033
|
+
* Example streaming plugin that provides real-time UI updates.
|
|
1034
|
+
* This plugin demonstrates how to emit stream events to the UI layer.
|
|
1035
|
+
*/
|
|
1036
|
+
export declare class StreamingUIPlugin implements BoothPlugin {
|
|
1037
|
+
readonly id = "streaming-ui";
|
|
1038
|
+
readonly name = "Streaming UI Plugin";
|
|
1039
|
+
readonly description = "Provides real-time UI updates during streaming responses";
|
|
1040
|
+
private onStreamCallback?;
|
|
1041
|
+
constructor(onStreamCallback?: StreamEventCallback);
|
|
1042
|
+
/**
|
|
1043
|
+
* Handle individual stream events and emit them to the UI layer.
|
|
1044
|
+
* This enables real-time updates to the user interface.
|
|
1045
|
+
*/
|
|
1046
|
+
onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
|
|
1047
|
+
/**
|
|
1048
|
+
* Set or update the stream callback for UI updates.
|
|
1049
|
+
*/
|
|
1050
|
+
setStreamCallback(callback: StreamEventCallback): void;
|
|
1051
|
+
/**
|
|
1052
|
+
* Remove the stream callback.
|
|
1053
|
+
*/
|
|
1054
|
+
removeStreamCallback(): void;
|
|
1055
|
+
/**
|
|
1056
|
+
* Required method - determines whether to end the interaction loop.
|
|
1057
|
+
* For a UI plugin, we never want to end the loop ourselves.
|
|
1058
|
+
*/
|
|
1059
|
+
shouldEndInteractionLoop(): Promise<boolean>;
|
|
1060
|
+
}
|
|
1061
|
+
|
|
857
1062
|
/**
|
|
858
1063
|
* Context information provided during tool call execution.
|
|
859
1064
|
*/
|
package/dist/index.js
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { randomUUID as R } from "crypto";
|
|
1
2
|
class p {
|
|
2
3
|
/**
|
|
3
4
|
* Collection of registered plugins.
|
|
@@ -180,8 +181,8 @@ class p {
|
|
|
180
181
|
*/
|
|
181
182
|
async runAfterToolCall(t, o, e, r) {
|
|
182
183
|
let s = e;
|
|
183
|
-
for (const
|
|
184
|
-
|
|
184
|
+
for (const i of this.plugins)
|
|
185
|
+
i.onAfterToolCall && (s = await i.onAfterToolCall(t, o, s, r));
|
|
185
186
|
return s;
|
|
186
187
|
}
|
|
187
188
|
/**
|
|
@@ -197,10 +198,32 @@ class p {
|
|
|
197
198
|
*/
|
|
198
199
|
async runToolCallError(t, o, e, r) {
|
|
199
200
|
let s = `Error: ${e.message}`;
|
|
200
|
-
for (const
|
|
201
|
-
|
|
201
|
+
for (const i of this.plugins)
|
|
202
|
+
i.onToolCallError && (s = await i.onToolCallError(t, o, e, r));
|
|
202
203
|
return s;
|
|
203
204
|
}
|
|
205
|
+
/**
|
|
206
|
+
* Sequentially invokes every plugin's onStreamEvent hook.
|
|
207
|
+
* This is called for each streaming event during response generation,
|
|
208
|
+
* allowing plugins to process or modify stream events in real-time.
|
|
209
|
+
*
|
|
210
|
+
* @param utilities - Context information including booth and tool registries
|
|
211
|
+
* @param streamEvent - The streaming event that was received
|
|
212
|
+
* @param context - Context information about the streaming session
|
|
213
|
+
* @returns Modified stream event after all plugins have processed it
|
|
214
|
+
*/
|
|
215
|
+
async runStreamEvent(t, o, e) {
|
|
216
|
+
let r = o;
|
|
217
|
+
for (const s of this.plugins)
|
|
218
|
+
if (s.onStreamEvent)
|
|
219
|
+
try {
|
|
220
|
+
const i = await s.onStreamEvent(t, r, e);
|
|
221
|
+
i && (r = i);
|
|
222
|
+
} catch (i) {
|
|
223
|
+
console.error(`Error in plugin ${s.id} during stream event processing:`, i);
|
|
224
|
+
}
|
|
225
|
+
return r;
|
|
226
|
+
}
|
|
204
227
|
}
|
|
205
228
|
const u = {
|
|
206
229
|
id: "orchestrator",
|
|
@@ -237,7 +260,7 @@ const u = {
|
|
|
237
260
|
- User: "I need help" → "What specifically would you like help with?" → then route based on response
|
|
238
261
|
`
|
|
239
262
|
};
|
|
240
|
-
class
|
|
263
|
+
class B {
|
|
241
264
|
/**
|
|
242
265
|
* Creates a new booth registry with a specified base booth configuration.
|
|
243
266
|
*
|
|
@@ -434,18 +457,54 @@ class R {
|
|
|
434
457
|
).length <= 1 && this.hasOrchestrator && this.disableMultiBoothMode();
|
|
435
458
|
}
|
|
436
459
|
}
|
|
437
|
-
class
|
|
460
|
+
class T {
|
|
438
461
|
/**
|
|
439
462
|
* Creates an instance of InteractionProcessor.
|
|
440
463
|
* @param boothRegistry - The registry for booth configurations.
|
|
441
464
|
* @param boothPlugins - The registry for booth plugins.
|
|
442
465
|
* @param toolRegistry - The registry for available tools.
|
|
443
466
|
* @param llmAdapter - The adapter for interacting with the LLM.
|
|
467
|
+
* @param options - Configuration options for streaming and other behaviors.
|
|
468
|
+
*/
|
|
469
|
+
constructor(t, o, e, r, s) {
|
|
470
|
+
this.boothRegistry = t, this.boothPlugins = o, this.toolRegistry = e, this.llmAdapter = r, this.options = {
|
|
471
|
+
streaming: !1,
|
|
472
|
+
fallbackToNonStreaming: !0,
|
|
473
|
+
...s
|
|
474
|
+
};
|
|
475
|
+
}
|
|
476
|
+
/**
|
|
477
|
+
* Generates a consistent ID for responses and messages.
|
|
478
|
+
* @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
|
|
479
|
+
* @returns A unique ID string
|
|
480
|
+
* @private
|
|
444
481
|
*/
|
|
445
|
-
|
|
446
|
-
|
|
482
|
+
generateId(t) {
|
|
483
|
+
return `${t}_${Date.now()}_${R()}`;
|
|
484
|
+
}
|
|
485
|
+
/**
|
|
486
|
+
* Creates a standardized message object for responses.
|
|
487
|
+
* @param text - The text content for the message
|
|
488
|
+
* @returns A formatted message object
|
|
489
|
+
* @private
|
|
490
|
+
*/
|
|
491
|
+
createMessage(t) {
|
|
492
|
+
return {
|
|
493
|
+
id: this.generateId("msg"),
|
|
494
|
+
content: [
|
|
495
|
+
{
|
|
496
|
+
type: "output_text",
|
|
497
|
+
text: t,
|
|
498
|
+
annotations: []
|
|
499
|
+
}
|
|
500
|
+
],
|
|
501
|
+
role: "assistant",
|
|
502
|
+
status: "completed",
|
|
503
|
+
type: "message"
|
|
504
|
+
};
|
|
447
505
|
}
|
|
448
506
|
loopLimit = 10;
|
|
507
|
+
options;
|
|
449
508
|
/**
|
|
450
509
|
* Creates a synthetic error response with proper structure and error details.
|
|
451
510
|
* @param error - The error that occurred
|
|
@@ -461,7 +520,7 @@ class B {
|
|
|
461
520
|
if (r && (s.code = r), o.model === void 0)
|
|
462
521
|
throw new Error("Model must be specified in response parameters for error handling.");
|
|
463
522
|
return {
|
|
464
|
-
id:
|
|
523
|
+
id: this.generateId("error"),
|
|
465
524
|
created_at: Math.floor(Date.now() / 1e3),
|
|
466
525
|
output_text: "An error occurred while communicating with the language model.",
|
|
467
526
|
error: s,
|
|
@@ -471,19 +530,7 @@ class B {
|
|
|
471
530
|
model: o.model,
|
|
472
531
|
object: "response",
|
|
473
532
|
output: [
|
|
474
|
-
{
|
|
475
|
-
id: `msg_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
|
|
476
|
-
content: [
|
|
477
|
-
{
|
|
478
|
-
type: "output_text",
|
|
479
|
-
text: `Error: ${e}. Please try again or contact support if the issue persists.`,
|
|
480
|
-
annotations: []
|
|
481
|
-
}
|
|
482
|
-
],
|
|
483
|
-
role: "assistant",
|
|
484
|
-
status: "completed",
|
|
485
|
-
type: "message"
|
|
486
|
-
}
|
|
533
|
+
this.createMessage(`Error: ${e}. Please try again or contact support if the issue persists.`)
|
|
487
534
|
],
|
|
488
535
|
parallel_tool_calls: o.parallel_tool_calls || !1,
|
|
489
536
|
temperature: o.temperature || null,
|
|
@@ -500,6 +547,21 @@ class B {
|
|
|
500
547
|
* @private
|
|
501
548
|
*/
|
|
502
549
|
async callLLM(t) {
|
|
550
|
+
if (this.options.streaming && this.llmAdapter.invokeStream && this.llmAdapter.interpretStream)
|
|
551
|
+
try {
|
|
552
|
+
return await this.callLLMStreaming(t);
|
|
553
|
+
} catch (o) {
|
|
554
|
+
return console.error("Error calling LLM with streaming:", o), this.options.fallbackToNonStreaming ? (console.warn("Falling back to non-streaming mode"), await this.callLLMNonStreaming(t)) : this.createErrorResponse(o, t);
|
|
555
|
+
}
|
|
556
|
+
return await this.callLLMNonStreaming(t);
|
|
557
|
+
}
|
|
558
|
+
/**
|
|
559
|
+
* Calls the LLM in non-streaming mode.
|
|
560
|
+
* @param responseCreateParams - The parameters for creating the response.
|
|
561
|
+
* @returns A promise that resolves with the LLM's response.
|
|
562
|
+
* @private
|
|
563
|
+
*/
|
|
564
|
+
async callLLMNonStreaming(t) {
|
|
503
565
|
try {
|
|
504
566
|
const o = await this.llmAdapter.invoke(t);
|
|
505
567
|
return await this.llmAdapter.interpret(o);
|
|
@@ -507,6 +569,126 @@ class B {
|
|
|
507
569
|
return console.error("Error calling LLM:", o), this.createErrorResponse(o, t);
|
|
508
570
|
}
|
|
509
571
|
}
|
|
572
|
+
/**
|
|
573
|
+
* Calls the LLM in streaming mode, accumulating stream events into a complete response.
|
|
574
|
+
* @param responseCreateParams - The parameters for creating the response.
|
|
575
|
+
* @returns A promise that resolves with the accumulated response.
|
|
576
|
+
* @private
|
|
577
|
+
*/
|
|
578
|
+
async callLLMStreaming(t) {
|
|
579
|
+
if (!this.llmAdapter.invokeStream || !this.llmAdapter.interpretStream)
|
|
580
|
+
throw new Error("Adapter does not support streaming");
|
|
581
|
+
const o = {
|
|
582
|
+
...t,
|
|
583
|
+
stream: !0
|
|
584
|
+
}, e = this.llmAdapter.invokeStream(o);
|
|
585
|
+
let r = {
|
|
586
|
+
output: [],
|
|
587
|
+
output_text: ""
|
|
588
|
+
}, s = 0;
|
|
589
|
+
for await (const i of e) {
|
|
590
|
+
const l = await this.llmAdapter.interpretStream(i), h = {
|
|
591
|
+
responseParams: t,
|
|
592
|
+
streamIndex: s,
|
|
593
|
+
accumulatedResponse: r
|
|
594
|
+
}, a = await this.boothPlugins.runStreamEvent(
|
|
595
|
+
{
|
|
596
|
+
toolRegistry: this.toolRegistry,
|
|
597
|
+
boothRegistry: this.boothRegistry,
|
|
598
|
+
pluginRegistry: this.boothPlugins,
|
|
599
|
+
llmAdapter: this.llmAdapter
|
|
600
|
+
},
|
|
601
|
+
l,
|
|
602
|
+
h
|
|
603
|
+
);
|
|
604
|
+
r = this.mergeStreamEvent(r, a), s++;
|
|
605
|
+
}
|
|
606
|
+
return this.finalizeAccumulatedResponse(r, t);
|
|
607
|
+
}
|
|
608
|
+
/**
|
|
609
|
+
* Type guard to check if an output item has a function property (tool call).
|
|
610
|
+
* @param output - The output item to check
|
|
611
|
+
* @returns True if the output item is a tool call with a function
|
|
612
|
+
* @private
|
|
613
|
+
*/
|
|
614
|
+
isToolCall(t) {
|
|
615
|
+
return t && "function" in t && t.function && typeof t.function.arguments == "string";
|
|
616
|
+
}
|
|
617
|
+
/**
|
|
618
|
+
* Helper function to safely update tool call arguments.
|
|
619
|
+
* @param accumulated - The accumulated response
|
|
620
|
+
* @param callId - The tool call ID to update
|
|
621
|
+
* @param delta - The argument delta to append
|
|
622
|
+
* @private
|
|
623
|
+
*/
|
|
624
|
+
updateToolCallArguments(t, o, e) {
|
|
625
|
+
if (!t.output) return;
|
|
626
|
+
const r = t.output.find((s) => s.id === o);
|
|
627
|
+
this.isToolCall(r) && (r.function.arguments = (r.function.arguments || "") + e);
|
|
628
|
+
}
|
|
629
|
+
/**
|
|
630
|
+
* Helper function to safely update a tool call with final data.
|
|
631
|
+
* @param accumulated - The accumulated response
|
|
632
|
+
* @param toolCallData - The final tool call data
|
|
633
|
+
* @private
|
|
634
|
+
*/
|
|
635
|
+
updateToolCallFunction(t, o) {
|
|
636
|
+
if (!t.output || !o?.id || !o?.function) return;
|
|
637
|
+
const e = t.output.find((r) => r.id === o.id);
|
|
638
|
+
this.isToolCall(e) && (e.function = o.function);
|
|
639
|
+
}
|
|
640
|
+
/**
|
|
641
|
+
* Merges a stream event into the accumulated response.
|
|
642
|
+
* @param accumulated - The current accumulated response.
|
|
643
|
+
* @param streamEvent - The stream event to merge.
|
|
644
|
+
* @returns The updated accumulated response.
|
|
645
|
+
* @private
|
|
646
|
+
*/
|
|
647
|
+
mergeStreamEvent(t, o) {
|
|
648
|
+
if (!o || !o.type)
|
|
649
|
+
return t;
|
|
650
|
+
switch (o.type) {
|
|
651
|
+
case "text_delta":
|
|
652
|
+
o.delta && !o.metadata?.call_id ? t.output_text = (t.output_text || "") + o.delta : o.delta && o.metadata?.call_id && typeof o.metadata.call_id == "string" && (t.output = t.output || [], this.updateToolCallArguments(t, o.metadata.call_id, o.delta));
|
|
653
|
+
break;
|
|
654
|
+
case "tool_call_start":
|
|
655
|
+
o.toolCall && (t.output = t.output || [], t.output.push(o.toolCall));
|
|
656
|
+
break;
|
|
657
|
+
case "tool_call_end":
|
|
658
|
+
o.toolCall && (t.output = t.output || [], this.updateToolCallFunction(t, o.toolCall));
|
|
659
|
+
break;
|
|
660
|
+
}
|
|
661
|
+
return t;
|
|
662
|
+
}
|
|
663
|
+
/**
|
|
664
|
+
* Creates a complete Response object from accumulated stream data.
|
|
665
|
+
* @param accumulated - The accumulated response data.
|
|
666
|
+
* @param originalParams - The original request parameters.
|
|
667
|
+
* @returns A complete Response object.
|
|
668
|
+
* @private
|
|
669
|
+
*/
|
|
670
|
+
finalizeAccumulatedResponse(t, o) {
|
|
671
|
+
return {
|
|
672
|
+
id: this.generateId("stream"),
|
|
673
|
+
created_at: Math.floor(Date.now() / 1e3),
|
|
674
|
+
output_text: t.output_text || "",
|
|
675
|
+
error: null,
|
|
676
|
+
incomplete_details: null,
|
|
677
|
+
instructions: null,
|
|
678
|
+
metadata: null,
|
|
679
|
+
model: o.model || "unknown",
|
|
680
|
+
object: "response",
|
|
681
|
+
output: t.output || [
|
|
682
|
+
this.createMessage(t.output_text || "")
|
|
683
|
+
],
|
|
684
|
+
parallel_tool_calls: o.parallel_tool_calls || !1,
|
|
685
|
+
temperature: o.temperature || null,
|
|
686
|
+
tool_choice: o.tool_choice || "auto",
|
|
687
|
+
tools: o.tools || [],
|
|
688
|
+
top_p: o.top_p || null,
|
|
689
|
+
status: "completed"
|
|
690
|
+
};
|
|
691
|
+
}
|
|
510
692
|
/**
|
|
511
693
|
* Runs the main interaction loop, sending messages to the LLM and processing
|
|
512
694
|
* the responses through the registered plugins.
|
|
@@ -588,7 +770,7 @@ class B {
|
|
|
588
770
|
), r;
|
|
589
771
|
}
|
|
590
772
|
}
|
|
591
|
-
const
|
|
773
|
+
const I = {
|
|
592
774
|
id: "summarizer",
|
|
593
775
|
role: 'You are a highly skilled summarization AI. Your task is to read a conversation history and provide a concise, neutral, and objective summary. The summary should capture the key points, decisions made, and any unresolved questions. It must be written from a third-person perspective and should be clear enough for another AI assistant to understand the full context and continue the conversation seamlessly without needing the original transcript. Do not add any conversational fluff or introductory phrases like "Here is the summary:".',
|
|
594
776
|
description: "A specialized booth for summarizing conversation histories."
|
|
@@ -644,7 +826,7 @@ ${o}
|
|
|
644
826
|
}
|
|
645
827
|
};
|
|
646
828
|
}
|
|
647
|
-
class
|
|
829
|
+
class x {
|
|
648
830
|
/**
|
|
649
831
|
* The sessionHistory variable stores the conversation history between the user and the booth system.
|
|
650
832
|
* It is initialized as an empty array and will be populated with messages exchanged during the interaction.
|
|
@@ -740,13 +922,13 @@ class v {
|
|
|
740
922
|
async onResponseReceived(t, o, e) {
|
|
741
923
|
let s = [...o.input, ...e?.output ?? []];
|
|
742
924
|
if (this.responseContainsBoothChange(e)) {
|
|
743
|
-
const
|
|
925
|
+
const l = `Please summarize the following conversation history:
|
|
744
926
|
|
|
745
|
-
${JSON.stringify(this.sessionHistory)}`,
|
|
927
|
+
${JSON.stringify(this.sessionHistory)}`, g = (await L(t.llmAdapter, I).callProcessor.send(l)).output_text, d = s.filter((c) => "role" in c && c.role === "user").pop(), b = {
|
|
746
928
|
role: "developer",
|
|
747
|
-
content: `A conversation summary up to this point: ${
|
|
748
|
-
},
|
|
749
|
-
this.sessionHistory =
|
|
929
|
+
content: `A conversation summary up to this point: ${g}`
|
|
930
|
+
}, _ = s.filter((c) => !("role" in c && c.role === "user" || "type" in c && c.type === "message"));
|
|
931
|
+
this.sessionHistory = d ? [..._, b, d] : [..._, b], s = this.sessionHistory;
|
|
750
932
|
} else
|
|
751
933
|
this.sessionHistory = s;
|
|
752
934
|
return {
|
|
@@ -764,7 +946,7 @@ ${JSON.stringify(this.sessionHistory)}`, d = (await P(t.llmAdapter, T).callProce
|
|
|
764
946
|
return !1;
|
|
765
947
|
}
|
|
766
948
|
}
|
|
767
|
-
class
|
|
949
|
+
class C {
|
|
768
950
|
/**
|
|
769
951
|
* Unique identifier for this plugin instance.
|
|
770
952
|
* @private
|
|
@@ -810,12 +992,12 @@ class E {
|
|
|
810
992
|
const e = t.boothRegistry;
|
|
811
993
|
let s = e.baseBoothConfig.description;
|
|
812
994
|
if (e.isMultiBoothMode) {
|
|
813
|
-
const
|
|
995
|
+
const i = e.orchestratorBoothConfig, l = e.currentContextBoothConfig;
|
|
814
996
|
s += `
|
|
815
997
|
|
|
816
|
-
${
|
|
998
|
+
${i.description}`, l.id !== i.id && (s += `
|
|
817
999
|
|
|
818
|
-
${
|
|
1000
|
+
${l.description}`);
|
|
819
1001
|
}
|
|
820
1002
|
return { ...o, instructions: s };
|
|
821
1003
|
}
|
|
@@ -829,7 +1011,7 @@ class E {
|
|
|
829
1011
|
return !1;
|
|
830
1012
|
}
|
|
831
1013
|
}
|
|
832
|
-
class
|
|
1014
|
+
class w {
|
|
833
1015
|
tools;
|
|
834
1016
|
/**
|
|
835
1017
|
* Initializes an empty Map to store tools.
|
|
@@ -925,7 +1107,7 @@ class _ {
|
|
|
925
1107
|
this.tools.delete(t);
|
|
926
1108
|
}
|
|
927
1109
|
}
|
|
928
|
-
function
|
|
1110
|
+
function S(n) {
|
|
929
1111
|
switch (n.type) {
|
|
930
1112
|
case "function":
|
|
931
1113
|
return `function:${n.name}`;
|
|
@@ -948,15 +1130,15 @@ function I(n) {
|
|
|
948
1130
|
return `${n.type}:${JSON.stringify(n)}`;
|
|
949
1131
|
}
|
|
950
1132
|
}
|
|
951
|
-
function
|
|
1133
|
+
function v(n) {
|
|
952
1134
|
const t = /* @__PURE__ */ new Set(), o = [];
|
|
953
1135
|
for (const e of n) {
|
|
954
|
-
const r =
|
|
1136
|
+
const r = S(e);
|
|
955
1137
|
t.has(r) || (t.add(r), o.push(e));
|
|
956
1138
|
}
|
|
957
1139
|
return o;
|
|
958
1140
|
}
|
|
959
|
-
class
|
|
1141
|
+
class A {
|
|
960
1142
|
description = "A plugin to aggregate and provide tools from base and context booths.";
|
|
961
1143
|
id = "tool-provider";
|
|
962
1144
|
name = "Tool Provider Plugin";
|
|
@@ -969,18 +1151,18 @@ class x {
|
|
|
969
1151
|
* @returns The updated response parameters with the aggregated list of tools.
|
|
970
1152
|
*/
|
|
971
1153
|
async onBeforeMessageSend(t, o) {
|
|
972
|
-
const e = t.boothRegistry.baseBoothConfig, r = t.boothRegistry.currentContextBoothConfig,
|
|
1154
|
+
const e = t.boothRegistry.baseBoothConfig, r = t.boothRegistry.currentContextBoothConfig, l = [...e.tools || [], ...r?.tools || []].filter((a, g, d) => d.indexOf(a) === g).map(
|
|
973
1155
|
(a) => t.toolRegistry.getTool(a)
|
|
974
1156
|
);
|
|
975
|
-
if (e.mcp &&
|
|
1157
|
+
if (e.mcp && l.push(...e.mcp), r?.mcp && l.push(...r.mcp), t.boothRegistry.isMultiBoothMode) {
|
|
976
1158
|
const a = y(t.boothRegistry);
|
|
977
|
-
|
|
1159
|
+
l.push(a);
|
|
978
1160
|
}
|
|
979
|
-
|
|
980
|
-
const
|
|
1161
|
+
l.push(...t.toolRegistry.getGlobalTools());
|
|
1162
|
+
const h = v(l);
|
|
981
1163
|
return {
|
|
982
1164
|
...o,
|
|
983
|
-
tools:
|
|
1165
|
+
tools: h
|
|
984
1166
|
};
|
|
985
1167
|
}
|
|
986
1168
|
/**
|
|
@@ -1023,16 +1205,16 @@ class m {
|
|
|
1023
1205
|
call_id: r.call_id,
|
|
1024
1206
|
output: `Error: Tool '${r.name}' does not have an 'execute' method.`
|
|
1025
1207
|
};
|
|
1026
|
-
const
|
|
1208
|
+
const i = await s.execute(JSON.parse(r.arguments)), l = await t.pluginRegistry.runAfterToolCall(
|
|
1027
1209
|
t,
|
|
1028
1210
|
r,
|
|
1029
|
-
|
|
1211
|
+
i,
|
|
1030
1212
|
e
|
|
1031
1213
|
);
|
|
1032
1214
|
return {
|
|
1033
1215
|
type: "function_call_output",
|
|
1034
1216
|
call_id: r.call_id,
|
|
1035
|
-
output: JSON.stringify(
|
|
1217
|
+
output: JSON.stringify(l)
|
|
1036
1218
|
};
|
|
1037
1219
|
} catch (r) {
|
|
1038
1220
|
console.error(`Error executing tool ${o.name}:`, r);
|
|
@@ -1067,22 +1249,22 @@ class m {
|
|
|
1067
1249
|
const r = e?.output ?? [], s = m.extractFunctionCalls(r);
|
|
1068
1250
|
if (!s.length)
|
|
1069
1251
|
return o;
|
|
1070
|
-
const
|
|
1071
|
-
for (let
|
|
1072
|
-
const
|
|
1073
|
-
if (t.toolRegistry.isLocalTool(
|
|
1252
|
+
const i = [];
|
|
1253
|
+
for (let l = 0; l < s.length; l++) {
|
|
1254
|
+
const h = s[l];
|
|
1255
|
+
if (t.toolRegistry.isLocalTool(h.name))
|
|
1074
1256
|
continue;
|
|
1075
1257
|
const a = {
|
|
1076
1258
|
responseParams: o,
|
|
1077
1259
|
response: e,
|
|
1078
|
-
toolCallIndex:
|
|
1260
|
+
toolCallIndex: l,
|
|
1079
1261
|
totalToolCalls: s.length
|
|
1080
|
-
},
|
|
1081
|
-
|
|
1262
|
+
}, g = await this.executeToolCall(t, h, a);
|
|
1263
|
+
i.push(g);
|
|
1082
1264
|
}
|
|
1083
1265
|
return {
|
|
1084
1266
|
...o,
|
|
1085
|
-
input: [...o.input, ...
|
|
1267
|
+
input: [...o.input, ...i]
|
|
1086
1268
|
};
|
|
1087
1269
|
}
|
|
1088
1270
|
/**
|
|
@@ -1151,16 +1333,99 @@ class M {
|
|
|
1151
1333
|
return o;
|
|
1152
1334
|
}
|
|
1153
1335
|
}
|
|
1154
|
-
|
|
1155
|
-
|
|
1156
|
-
|
|
1336
|
+
class $ {
|
|
1337
|
+
id = "streaming-logger";
|
|
1338
|
+
name = "Streaming Logger Plugin";
|
|
1339
|
+
description = "Logs streaming events in real-time for debugging and monitoring";
|
|
1340
|
+
logPrefix;
|
|
1341
|
+
constructor(t = "[StreamingLogger]") {
|
|
1342
|
+
this.logPrefix = t;
|
|
1343
|
+
}
|
|
1344
|
+
/**
|
|
1345
|
+
* Handle individual stream events as they arrive.
|
|
1346
|
+
* This allows for real-time processing and logging of streaming content.
|
|
1347
|
+
*/
|
|
1348
|
+
async onStreamEvent(t, o, e) {
|
|
1349
|
+
switch (o.type) {
|
|
1350
|
+
case "response_start":
|
|
1351
|
+
console.log(`${this.logPrefix} Stream started`);
|
|
1352
|
+
break;
|
|
1353
|
+
case "text_delta":
|
|
1354
|
+
console.log(`${this.logPrefix} Text chunk [${e.streamIndex}]: "${o.delta}"`);
|
|
1355
|
+
break;
|
|
1356
|
+
case "tool_call_start":
|
|
1357
|
+
console.log(`${this.logPrefix} Tool call started: ${o.toolCall?.name}`);
|
|
1358
|
+
break;
|
|
1359
|
+
case "tool_call_end":
|
|
1360
|
+
console.log(`${this.logPrefix} Tool call completed: ${o.toolCall?.name}`);
|
|
1361
|
+
break;
|
|
1362
|
+
case "response_end":
|
|
1363
|
+
console.log(`${this.logPrefix} Stream completed after ${e.streamIndex} events`);
|
|
1364
|
+
break;
|
|
1365
|
+
default:
|
|
1366
|
+
console.log(`${this.logPrefix} Stream event [${e.streamIndex}]: ${o.type}`);
|
|
1367
|
+
}
|
|
1368
|
+
return o;
|
|
1369
|
+
}
|
|
1370
|
+
/**
|
|
1371
|
+
* Required method - determines whether to end the interaction loop.
|
|
1372
|
+
* For a logging plugin, we never want to end the loop ourselves.
|
|
1373
|
+
*/
|
|
1374
|
+
async shouldEndInteractionLoop() {
|
|
1375
|
+
return !1;
|
|
1376
|
+
}
|
|
1377
|
+
}
|
|
1378
|
+
class k {
|
|
1379
|
+
id = "streaming-ui";
|
|
1380
|
+
name = "Streaming UI Plugin";
|
|
1381
|
+
description = "Provides real-time UI updates during streaming responses";
|
|
1382
|
+
onStreamCallback;
|
|
1383
|
+
constructor(t) {
|
|
1384
|
+
this.onStreamCallback = t;
|
|
1385
|
+
}
|
|
1386
|
+
/**
|
|
1387
|
+
* Handle individual stream events and emit them to the UI layer.
|
|
1388
|
+
* This enables real-time updates to the user interface.
|
|
1389
|
+
*/
|
|
1390
|
+
async onStreamEvent(t, o, e) {
|
|
1391
|
+
this.onStreamCallback && o.type === "text_delta" && this.onStreamCallback(o, e);
|
|
1392
|
+
let r = o;
|
|
1393
|
+
return o.type === "text_delta" && o.delta && (r = {
|
|
1394
|
+
...o,
|
|
1395
|
+
// Example: add HTML escaping (though this should be done in the UI layer)
|
|
1396
|
+
delta: o.delta
|
|
1397
|
+
}), r;
|
|
1398
|
+
}
|
|
1399
|
+
/**
|
|
1400
|
+
* Set or update the stream callback for UI updates.
|
|
1401
|
+
*/
|
|
1402
|
+
setStreamCallback(t) {
|
|
1403
|
+
this.onStreamCallback = t;
|
|
1404
|
+
}
|
|
1405
|
+
/**
|
|
1406
|
+
* Remove the stream callback.
|
|
1407
|
+
*/
|
|
1408
|
+
removeStreamCallback() {
|
|
1409
|
+
this.onStreamCallback = void 0;
|
|
1410
|
+
}
|
|
1411
|
+
/**
|
|
1412
|
+
* Required method - determines whether to end the interaction loop.
|
|
1413
|
+
* For a UI plugin, we never want to end the loop ourselves.
|
|
1414
|
+
*/
|
|
1415
|
+
async shouldEndInteractionLoop() {
|
|
1416
|
+
return !1;
|
|
1417
|
+
}
|
|
1418
|
+
}
|
|
1419
|
+
function L(n, t) {
|
|
1420
|
+
const o = new B(t), e = new w(), r = new p();
|
|
1421
|
+
return new P({
|
|
1157
1422
|
llmAdapter: n,
|
|
1158
1423
|
booths: o,
|
|
1159
1424
|
tools: e,
|
|
1160
1425
|
boothPlugins: r
|
|
1161
1426
|
});
|
|
1162
1427
|
}
|
|
1163
|
-
class
|
|
1428
|
+
class P {
|
|
1164
1429
|
/**
|
|
1165
1430
|
* Represents a registry that maintains a collection of plugins for a booth system.
|
|
1166
1431
|
* The boothPluginRegistry is used to manage and access plugins that enhance
|
|
@@ -1215,7 +1480,7 @@ class A {
|
|
|
1215
1480
|
* @param {ToolRegistry} options.tools - Registry containing tool configurations
|
|
1216
1481
|
*/
|
|
1217
1482
|
constructor(t) {
|
|
1218
|
-
if (this.boothPluginRegistry = t?.boothPlugins ?? new p(), this.boothRegistry = t.booths, this.toolRegistry = t?.tools ?? new
|
|
1483
|
+
if (this.boothPluginRegistry = t?.boothPlugins ?? new p(), this.boothRegistry = t.booths, this.toolRegistry = t?.tools ?? new w(), this.boothRegistry.setMultiBoothModeCallbacks(
|
|
1219
1484
|
() => {
|
|
1220
1485
|
const o = y(this.boothRegistry);
|
|
1221
1486
|
this.toolRegistry.registerTools([o]);
|
|
@@ -1227,12 +1492,12 @@ class A {
|
|
|
1227
1492
|
this.toolRegistry.registerTools([o]);
|
|
1228
1493
|
}
|
|
1229
1494
|
this.systemPluginsRegistry = new p(), this.systemPluginsRegistry.registerPlugins([
|
|
1230
|
-
new
|
|
1231
|
-
new
|
|
1232
|
-
new
|
|
1495
|
+
new x(t.sessionHistory),
|
|
1496
|
+
new C(),
|
|
1497
|
+
new A(),
|
|
1233
1498
|
new m(),
|
|
1234
1499
|
new M()
|
|
1235
|
-
]), this.systemPluginsRegistry.registerPlugins(this.boothPluginRegistry.getPlugins()), this.callProcessor = new
|
|
1500
|
+
]), this.systemPluginsRegistry.registerPlugins(this.boothPluginRegistry.getPlugins()), this.callProcessor = new T(
|
|
1236
1501
|
this.boothRegistry,
|
|
1237
1502
|
this.systemPluginsRegistry,
|
|
1238
1503
|
this.toolRegistry,
|
|
@@ -1242,15 +1507,17 @@ class A {
|
|
|
1242
1507
|
}
|
|
1243
1508
|
export {
|
|
1244
1509
|
p as BoothPluginRegistry,
|
|
1245
|
-
|
|
1246
|
-
|
|
1247
|
-
|
|
1248
|
-
|
|
1510
|
+
B as BoothRegistry,
|
|
1511
|
+
C as ContextProviderPlugin,
|
|
1512
|
+
x as ConversationHistoryPlugin,
|
|
1513
|
+
P as CoreBooth,
|
|
1249
1514
|
M as FinishTurnPlugin,
|
|
1250
|
-
|
|
1515
|
+
T as InteractionProcessor,
|
|
1516
|
+
$ as StreamingLoggerPlugin,
|
|
1517
|
+
k as StreamingUIPlugin,
|
|
1251
1518
|
m as ToolExecutorPlugin,
|
|
1252
|
-
|
|
1253
|
-
|
|
1254
|
-
|
|
1519
|
+
A as ToolProviderPlugin,
|
|
1520
|
+
w as ToolRegistry,
|
|
1521
|
+
L as createCoreBooth,
|
|
1255
1522
|
y as createRouteToBoothTool
|
|
1256
1523
|
};
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "booths",
|
|
3
3
|
"private": false,
|
|
4
|
-
"version": "1.
|
|
4
|
+
"version": "1.3.1",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.js",
|
|
7
7
|
"module": "./dist/index.js",
|
|
@@ -19,11 +19,17 @@
|
|
|
19
19
|
"build:pack": "npm install --package-lock-only && npm run build && npm pack",
|
|
20
20
|
"build": "tsc && vite build",
|
|
21
21
|
"format": "prettier --write \"src/**/*.{js,ts,json,css,scss,md}\"",
|
|
22
|
-
"typecheck": "tsc --noEmit"
|
|
22
|
+
"typecheck": "tsc --noEmit",
|
|
23
|
+
"test": "vitest",
|
|
24
|
+
"test:watch": "vitest --watch",
|
|
25
|
+
"test:ui": "vitest --ui",
|
|
26
|
+
"test:coverage": "vitest --coverage"
|
|
23
27
|
},
|
|
24
28
|
"devDependencies": {
|
|
25
29
|
"@eslint/js": "^9.30.1",
|
|
26
30
|
"@types/node": "^24.0.11",
|
|
31
|
+
"@vitest/coverage-v8": "^3.2.4",
|
|
32
|
+
"@vitest/ui": "^3.2.4",
|
|
27
33
|
"dotenv": "^17.2.0",
|
|
28
34
|
"eslint": "^9.30.1",
|
|
29
35
|
"eslint-config-prettier": "^10.1.5",
|
|
@@ -33,9 +39,11 @@
|
|
|
33
39
|
"ts-node": "^10.9.2",
|
|
34
40
|
"typescript": "~5.8.3",
|
|
35
41
|
"typescript-eslint": "^8.36.0",
|
|
42
|
+
"vi-fetch": "^0.8.0",
|
|
36
43
|
"vite": "^6.3.5",
|
|
37
44
|
"vite-plugin-dts": "^4.5.4",
|
|
38
|
-
"vite-tsconfig-paths": "^5.1.4"
|
|
45
|
+
"vite-tsconfig-paths": "^5.1.4",
|
|
46
|
+
"vitest": "^3.2.4"
|
|
39
47
|
},
|
|
40
48
|
"peerDependencies": {
|
|
41
49
|
"openai": "^5.8.2"
|