booths 1.3.1 → 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/README.md +198 -349
  2. package/dist/index.d.ts +38 -216
  3. package/dist/index.js +147 -373
  4. package/package.json +2 -5
package/dist/index.d.ts CHANGED
@@ -120,9 +120,9 @@ export declare interface BoothPlugin {
  * @param toolCall - The tool call that was executed.
  * @param result - The result returned by the tool execution.
  * @param context - Context information about the tool call execution.
- * @returns The potentially modified tool call result.
+ * @returns The potentially modified tool call result, otherwise the original result.
  */
- onAfterToolCall?: (utilities: RepositoryUtilities, toolCall: ResponseFunctionToolCall, result: unknown, context: ToolCallContext) => Promise<unknown>;
+ onAfterToolCall?: (utilities: RepositoryUtilities, toolCall: ResponseFunctionToolCall, result: unknown, context: ToolCallContext) => Promise<typeof result>;
  /**
  * Called when an individual tool call encounters an error during execution.
  * This allows for custom error handling or recovery.
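For illustration only (not part of the package), a minimal sketch of a plugin using the revised onAfterToolCall hook, which returns the possibly modified result or the original one. The plugin id, name, and tagging logic are made up, and the type imports assume BoothPlugin, RepositoryUtilities, and ToolCallContext are exported from the package root with id, name, description, and shouldEndInteractionLoop as the required plugin members.

// Hypothetical plugin sketch; identifiers and logic are illustrative.
import type { BoothPlugin, RepositoryUtilities, ToolCallContext } from 'booths';

class ResultTaggingPlugin implements BoothPlugin {
  id = 'result-tagging';
  name = 'Result Tagging Plugin';
  description = 'Tags object-shaped tool results with a timestamp';

  // Return the (possibly modified) result; returning the input unchanged keeps behavior as before.
  async onAfterToolCall(
    _utilities: RepositoryUtilities,
    _toolCall: unknown,
    result: unknown,
    _context: ToolCallContext,
  ): Promise<unknown> {
    if (result && typeof result === 'object') {
      return { ...(result as Record<string, unknown>), observedAt: Date.now() };
    }
    return result; // otherwise the original result
  }

  async shouldEndInteractionLoop(): Promise<boolean> {
    return false; // this plugin never ends the loop itself
  }
}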
@@ -150,15 +150,6 @@ export declare interface BoothPlugin {
  * @returns The potentially modified final response.
  */
  onAfterInteractionLoopEnd?: (interactionLoopEndArgs: RepositoryUtilities, response: Response_2) => Promise<Response_2>;
- /**
- * Called for each streaming event as it arrives during response generation.
- * This is optional and only called when streaming is enabled.
- * @param utilities - Utilities for accessing repositories.
- * @param streamEvent - The streaming event that was received.
- * @param context - Context information about the streaming session.
- * @returns The potentially modified stream event, or void to pass through unchanged.
- */
- onStreamEvent?: (utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext) => Promise<StreamEvent | void>;
  }

  /**
@@ -302,17 +293,6 @@ export declare class BoothPluginRegistry {
  * @returns Error result or recovery value after all plugins have processed it
  */
  runToolCallError(utilities: RepositoryUtilities, toolCall: ResponseFunctionToolCall, error: Error, context: ToolCallContext): Promise<any>;
- /**
- * Sequentially invokes every plugin's onStreamEvent hook.
- * This is called for each streaming event during response generation,
- * allowing plugins to process or modify stream events in real-time.
- *
- * @param utilities - Context information including booth and tool registries
- * @param streamEvent - The streaming event that was received
- * @param context - Context information about the streaming session
- * @returns Modified stream event after all plugins have processed it
- */
- runStreamEvent(utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
  }

  /**
@@ -428,6 +408,13 @@ export declare class BoothRegistry {
  * @returns Record of all booth configurations indexed by their IDs
  */
  getAllBooths(): Record<string, BoothConfig>;
+ /**
+ * Returns only booths that should be available for routing (excludes core and orchestrator booths).
+ * This prevents double context issues by ensuring system booths are not selectable.
+ *
+ * @returns Record of selectable booth configurations indexed by their IDs
+ */
+ getSelectableBooths(): Record<string, BoothConfig>;
  toArray(): BoothConfig[];
  /**
  * Enables multi-booth mode by registering the orchestrator and setting it as current context.
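A short, hypothetical sketch of the new getSelectableBooths accessor: when building routing choices you would enumerate only selectable booths rather than every registered booth. The registry instance and the assumption that BoothRegistry is a root export are both illustrative.

// Hypothetical usage; `registry` is a BoothRegistry instance created elsewhere.
import { BoothRegistry } from 'booths';

declare const registry: BoothRegistry;

const selectable = registry.getSelectableBooths();
// Only these ids are offered to the routing tool, so core/orchestrator booths are never selectable.
const selectableIds = Object.keys(selectable);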
@@ -695,6 +682,7 @@ export declare class CoreBooth<T> {
  tools?: ToolRegistry;
  llmAdapter: LLMAdapter<T>;
  sessionHistory?: ResponseInput;
+ endInteractionLoopMarker?: string;
  });
  }

@@ -724,12 +712,14 @@ export declare function createRouteToBoothTool(boothRegistry: BoothRegistry): To
  * in the LLM's response. It also cleans up this marker before the final output is returned.
  */
  export declare class FinishTurnPlugin implements BoothPlugin {
+ private marker;
  description: string;
  id: string;
  name: string;
+ constructor(marker?: string);
  /**
  * Before sending a message, this hook adds an instruction to the LLM to include a
- * specific marker (`__awaiting_user_response__`) when it expects a user response.
+ * specific marker when it expects a user response.
  * @param _ - Unused repository utilities.
  * @param responseParams - The parameters for the response creation.
  * @returns The updated response parameters with the added instruction.
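For illustration, a sketch of the now-configurable end-of-turn marker; the string below is made up, and omitting the constructor argument presumably keeps the package's default marker. The same value could plausibly be passed as CoreBooth's new endInteractionLoopMarker option so the plugin and booth stay in sync, assuming FinishTurnPlugin is exported from the package root.

// Hypothetical custom marker; omit the argument to keep the package default.
import { FinishTurnPlugin } from 'booths';

const finishTurn = new FinishTurnPlugin('__turn_complete__');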
@@ -759,7 +749,7 @@ export declare class FinishTurnPlugin implements BoothPlugin {
  }>;
  /**
  * Determines whether the interaction loop should end by checking for the presence of the
- * `__awaiting_user_response__` marker in the response text or if there's an error response.
+ * marker in the response text or if there's an error response.
  * @param _ - Unused repository utilities.
  * @param __ - Unused response parameters.
  * @param response - The response from the LLM.
@@ -767,7 +757,7 @@ export declare class FinishTurnPlugin implements BoothPlugin {
  */
  shouldEndInteractionLoop(_: RepositoryUtilities, __: ResponseCreateParamsNonStreaming, response: Response_2): Promise<boolean>;
  /**
- * After the interaction loop ends, this hook removes the `__awaiting_user_response__` marker
+ * After the interaction loop ends, this hook removes the `marker` marker
  * from the final response before it is returned.
  * @param _ - Unused repository utilities.
  * @param response - The final response from the LLM.
@@ -777,30 +767,19 @@ export declare class FinishTurnPlugin implements BoothPlugin {
  }

  /**
- * The InteractionProcessor class orchestrates the conversation with the LLM,
- * managing the interaction loop, plugin execution, and message passing.
+ * A class responsible for processing interactions with a language learning model (LLM)
+ * by delegating tasks to a plugin-enabled architecture.
+ * This class manages the process of sending messages to an LLM,
+ * invoking plugins, handling errors, and iterating through responses.
+ *
+ * @template T The type used in the LLMAdapter for communicating with the LLM.
  */
  export declare class InteractionProcessor<T> {
  private boothRegistry;
  private boothPlugins;
  private toolRegistry;
  private llmAdapter;
- /**
- * Generates a consistent ID for responses and messages.
- * @param prefix - The prefix for the ID (e.g., 'stream', 'error', 'msg')
- * @returns A unique ID string
- * @private
- */
- private generateId;
- /**
- * Creates a standardized message object for responses.
- * @param text - The text content for the message
- * @returns A formatted message object
- * @private
- */
- private createMessage;
  private loopLimit;
- private options;
  /**
  * Creates a synthetic error response with proper structure and error details.
  * @param error - The error that occurred
@@ -810,64 +789,13 @@ export declare class InteractionProcessor<T> {
  */
  private createErrorResponse;
  /**
- * Calls the LLM with the given parameters.
- * @param responseCreateParams - The parameters for creating the response.
- * @returns A promise that resolves with the LLM's response.
- * @private
+ * Calls the LLM adapter to invoke and interpret the response for the given parameters.
+ *
+ * @param {ResponseCreateParamsNonStreaming} responseCreateParams - The parameters for creating the response.
+ * @param {RepositoryUtilities} prepareInitialMessagesArgs - The arguments required to prepare initial messages.
+ * @return {Promise<Response>} A promise that resolves to the interpreted response or an error response in case of failure.
  */
  private callLLM;
- /**
- * Calls the LLM in non-streaming mode.
- * @param responseCreateParams - The parameters for creating the response.
- * @returns A promise that resolves with the LLM's response.
- * @private
- */
- private callLLMNonStreaming;
- /**
- * Calls the LLM in streaming mode, accumulating stream events into a complete response.
- * @param responseCreateParams - The parameters for creating the response.
- * @returns A promise that resolves with the accumulated response.
- * @private
- */
- private callLLMStreaming;
- /**
- * Type guard to check if an output item has a function property (tool call).
- * @param output - The output item to check
- * @returns True if the output item is a tool call with a function
- * @private
- */
- private isToolCall;
- /**
- * Helper function to safely update tool call arguments.
- * @param accumulated - The accumulated response
- * @param callId - The tool call ID to update
- * @param delta - The argument delta to append
- * @private
- */
- private updateToolCallArguments;
- /**
- * Helper function to safely update a tool call with final data.
- * @param accumulated - The accumulated response
- * @param toolCallData - The final tool call data
- * @private
- */
- private updateToolCallFunction;
- /**
- * Merges a stream event into the accumulated response.
- * @param accumulated - The current accumulated response.
- * @param streamEvent - The stream event to merge.
- * @returns The updated accumulated response.
- * @private
- */
- private mergeStreamEvent;
- /**
- * Creates a complete Response object from accumulated stream data.
- * @param accumulated - The accumulated response data.
- * @param originalParams - The original request parameters.
- * @returns A complete Response object.
- * @private
- */
- private finalizeAccumulatedResponse;
  /**
  * Runs the main interaction loop, sending messages to the LLM and processing
  * the responses through the registered plugins.
@@ -882,9 +810,8 @@ export declare class InteractionProcessor<T> {
  * @param boothPlugins - The registry for booth plugins.
  * @param toolRegistry - The registry for available tools.
  * @param llmAdapter - The adapter for interacting with the LLM.
- * @param options - Configuration options for streaming and other behaviors.
  */
- constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>, options?: InteractionProcessorOptions);
+ constructor(boothRegistry: BoothRegistry, boothPlugins: BoothPluginRegistry, toolRegistry: ToolRegistry, llmAdapter: LLMAdapter<T>);
  /**
  * Sends a message to the LLM and processes the response through the interaction loop.
  * This involves running pre-loop, pre-send, response-received, and post-loop plugin hooks.
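A minimal, hypothetical construction sketch reflecting the slimmed-down constructor (the former options argument is gone in 1.4.1). The instance names are placeholders, and the sketch assumes these classes and the LLMAdapter type are exported from the package root.

// Hypothetical wiring; each registry/adapter instance is assumed to be created elsewhere.
import { InteractionProcessor, BoothRegistry, BoothPluginRegistry, ToolRegistry } from 'booths';
import type { LLMAdapter } from 'booths';

declare const boothRegistry: BoothRegistry;
declare const boothPlugins: BoothPluginRegistry;
declare const toolRegistry: ToolRegistry;
declare const llmAdapter: LLMAdapter<unknown>;

// No streaming options parameter anymore.
const processor = new InteractionProcessor(boothRegistry, boothPlugins, toolRegistry, llmAdapter);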
@@ -895,22 +822,14 @@ export declare class InteractionProcessor<T> {
  }

  /**
- * Configuration options for the InteractionProcessor.
+ * Interface representing a Large Language Model (LLM) Adapter that provides methods
+ * to interact with and interpret responses from an LLM.
+ *
+ * @template LLMResponse The type of the raw response returned by the LLM. Defaults to `any`.
  */
- export declare interface InteractionProcessorOptions {
- /** Enable streaming mode for LLM responses */
- streaming?: boolean;
- /** Fallback to non-streaming if streaming fails */
- fallbackToNonStreaming?: boolean;
- }
-
  export declare interface LLMAdapter<LLMResponse = any> {
- invoke: (responseParams: ResponseCreateParamsNonStreaming) => Promise<LLMResponse>;
+ invoke: (responseParams: ResponseCreateParamsNonStreaming, prepareInitialMessagesArgs: RepositoryUtilities) => Promise<LLMResponse>;
  interpret: (response: LLMResponse) => Promise<Response_2>;
- /** Optional method for streaming LLM responses */
- invokeStream?: (responseParams: ResponseCreateParamsStreaming) => AsyncIterable<LLMResponse>;
- /** Optional method for interpreting individual stream chunks into StreamEvents */
- interpretStream?: (streamChunk: LLMResponse) => Promise<StreamEvent>;
  }

  /**
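A hedged sketch of an adapter against the revised interface: invoke now also receives RepositoryUtilities (named prepareInitialMessagesArgs in the declaration), and the streaming members are gone. The backend call, the raw response type, and the mapping function below are placeholders, not package APIs, and the type imports assume root exports.

// Hypothetical adapter sketch; callMyBackend and toBoothsResponse are illustrative stand-ins.
import type { LLMAdapter, RepositoryUtilities, ResponseCreateParamsNonStreaming } from 'booths';

type MyRawResponse = { text: string };
declare function callMyBackend(params: ResponseCreateParamsNonStreaming): Promise<MyRawResponse>;
declare function toBoothsResponse(raw: MyRawResponse): Promise<any>; // `any` stands in for the package's Response type

const myAdapter: LLMAdapter<MyRawResponse> = {
  // invoke now also receives RepositoryUtilities alongside the request params.
  invoke: (responseParams, _utilities: RepositoryUtilities) => callMyBackend(responseParams),
  // interpret maps the raw backend payload into the Response shape the processor consumes.
  interpret: (raw) => toBoothsResponse(raw),
};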
@@ -934,24 +853,8 @@ export declare type RepositoryUtilities = {
  llmAdapter: LLMAdapter<unknown>;
  };

- export { Response_2 as Response }
-
  export { ResponseCreateParamsNonStreaming }

- /**
- * Response parameters for streaming requests.
- * This creates a new type that has all the properties of ResponseCreateParamsNonStreaming
- * but with stream: true instead of stream: false.
- */
- export declare type ResponseCreateParamsStreaming = Omit<ResponseCreateParamsNonStreaming, 'stream'> & {
- /** Must be true for streaming requests */
- stream: true;
- };
-
- export { ResponseInput }
-
- export { ResponseInputItem }
-
  /**
  * Represents the result of processing a single tool call.
  */
@@ -972,93 +875,6 @@ export declare type SingleToolProcessingResult = {
  toolExecuted: boolean;
  };

- /**
- * Context information provided during streaming event processing.
- */
- export declare interface StreamContext {
- /** The current response parameters being processed */
- responseParams: ResponseCreateParamsNonStreaming;
- /** Index of this stream event in the sequence */
- streamIndex: number;
- /** Total expected number of events (if known) */
- totalExpectedEvents?: number;
- /** Accumulated response content so far */
- accumulatedResponse: Partial<Response_2>;
- }
-
- /**
- * Represents a streaming event emitted during LLM response generation.
- */
- export declare interface StreamEvent {
- /** Type of stream event */
- type: 'text_delta' | 'tool_call_start' | 'tool_call_end' | 'response_start' | 'response_end';
- /** Text content for text_delta events */
- content?: string;
- /** Incremental text delta for text_delta events */
- delta?: string;
- /** Tool call information for tool-related events */
- toolCall?: ResponseFunctionToolCall;
- /** Additional metadata for the event */
- metadata?: Record<string, unknown>;
- }
-
- /**
- * Callback function type for handling stream events in the UI.
- */
- export declare type StreamEventCallback = (event: StreamEvent, context: StreamContext) => void;
-
- /**
- * Example streaming plugin that logs stream events in real-time.
- * This demonstrates how to implement streaming hooks in plugins.
- */
- export declare class StreamingLoggerPlugin implements BoothPlugin {
- readonly id = "streaming-logger";
- readonly name = "Streaming Logger Plugin";
- readonly description = "Logs streaming events in real-time for debugging and monitoring";
- private logPrefix;
- constructor(logPrefix?: string);
- /**
- * Handle individual stream events as they arrive.
- * This allows for real-time processing and logging of streaming content.
- */
- onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
- /**
- * Required method - determines whether to end the interaction loop.
- * For a logging plugin, we never want to end the loop ourselves.
- */
- shouldEndInteractionLoop(): Promise<boolean>;
- }
-
- /**
- * Example streaming plugin that provides real-time UI updates.
- * This plugin demonstrates how to emit stream events to the UI layer.
- */
- export declare class StreamingUIPlugin implements BoothPlugin {
- readonly id = "streaming-ui";
- readonly name = "Streaming UI Plugin";
- readonly description = "Provides real-time UI updates during streaming responses";
- private onStreamCallback?;
- constructor(onStreamCallback?: StreamEventCallback);
- /**
- * Handle individual stream events and emit them to the UI layer.
- * This enables real-time updates to the user interface.
- */
- onStreamEvent(_utilities: RepositoryUtilities, streamEvent: StreamEvent, context: StreamContext): Promise<StreamEvent>;
- /**
- * Set or update the stream callback for UI updates.
- */
- setStreamCallback(callback: StreamEventCallback): void;
- /**
- * Remove the stream callback.
- */
- removeStreamCallback(): void;
- /**
- * Required method - determines whether to end the interaction loop.
- * For a UI plugin, we never want to end the loop ourselves.
- */
- shouldEndInteractionLoop(): Promise<boolean>;
- }
-
  /**
  * Context information provided during tool call execution.
  */
@@ -1090,6 +906,12 @@ export declare class ToolExecutorPlugin implements BoothPlugin {
  * @private
  */
  private executeToolCall;
+ /**
+ * Extracts function call objects from an array of response output items.
+ *
+ * @param {ResponseOutputItem[]} output - The array of response output items to filter.
+ * @return {ResponseFunctionToolCall[]} An array containing only the function call objects from the input.
+ */
  static extractFunctionCalls(output: ResponseOutputItem[]): ResponseFunctionToolCall[];
  /**
  * After a response is received from the LLM, this hook checks for tool calls. If any are found,
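A small, hypothetical sketch of the newly documented static helper: given a response's output array, keep only the function tool calls. The response variable is assumed to come from the LLM adapter, and ToolExecutorPlugin is assumed to be a root export.

// Hypothetical usage; `response.output` stands in for a ResponseOutputItem[] from a prior LLM response.
import { ToolExecutorPlugin } from 'booths';

declare const response: { output: any[] };

const toolCalls = ToolExecutorPlugin.extractFunctionCalls(response.output);
for (const call of toolCalls) {
  // name/arguments follow the function tool call shape used by the declarations above.
  console.log(`tool requested: ${call.name}`, call.arguments);
}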