modelfusion 0.20.1 → 0.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. package/README.md +5 -7
  2. package/composed-function/summarize/SummarizationFunction.d.ts +3 -3
  3. package/composed-function/summarize/summarizeRecursively.d.ts +1 -1
  4. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +1 -1
  5. package/{run → core}/ConsoleLogger.cjs +1 -4
  6. package/core/ConsoleLogger.d.ts +5 -0
  7. package/core/ConsoleLogger.js +5 -0
  8. package/{run → core}/DefaultRun.cjs +2 -5
  9. package/{run → core}/DefaultRun.d.ts +5 -5
  10. package/{run → core}/DefaultRun.js +2 -5
  11. package/core/FunctionEvent.d.ts +75 -0
  12. package/{run/RunFunctionEventSource.js → core/FunctionEventSource.cjs} +7 -13
  13. package/core/FunctionEventSource.d.ts +12 -0
  14. package/{run/RunFunctionEventSource.cjs → core/FunctionEventSource.js} +3 -17
  15. package/core/FunctionObserver.d.ts +7 -0
  16. package/core/FunctionOptions.d.ts +19 -0
  17. package/core/GlobalFunctionObservers.cjs +12 -0
  18. package/core/GlobalFunctionObservers.d.ts +3 -0
  19. package/core/GlobalFunctionObservers.js +7 -0
  20. package/{run → core}/Run.d.ts +2 -2
  21. package/{run → core}/index.cjs +5 -5
  22. package/core/index.d.ts +9 -0
  23. package/core/index.js +9 -0
  24. package/index.cjs +1 -1
  25. package/index.d.ts +1 -1
  26. package/index.js +1 -1
  27. package/model-function/Model.d.ts +5 -2
  28. package/model-function/ModelCallEvent.d.ts +20 -5
  29. package/model-function/ModelFunctionOptions.d.ts +4 -0
  30. package/model-function/SuccessfulModelCall.cjs +6 -16
  31. package/model-function/SuccessfulModelCall.d.ts +2 -2
  32. package/model-function/SuccessfulModelCall.js +6 -16
  33. package/model-function/embed-text/TextEmbeddingEvent.d.ts +15 -17
  34. package/model-function/embed-text/TextEmbeddingModel.d.ts +3 -3
  35. package/model-function/embed-text/embedText.cjs +6 -67
  36. package/model-function/embed-text/embedText.d.ts +4 -4
  37. package/model-function/embed-text/embedText.js +6 -67
  38. package/model-function/executeCall.cjs +49 -14
  39. package/model-function/executeCall.d.ts +9 -10
  40. package/model-function/executeCall.js +49 -14
  41. package/model-function/generate-image/ImageGenerationEvent.d.ts +12 -16
  42. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  43. package/model-function/generate-image/generateImage.cjs +2 -30
  44. package/model-function/generate-image/generateImage.d.ts +2 -2
  45. package/model-function/generate-image/generateImage.js +2 -30
  46. package/model-function/generate-json/GenerateJsonModel.d.ts +2 -2
  47. package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +2 -2
  48. package/model-function/generate-json/JsonGenerationEvent.d.ts +6 -21
  49. package/model-function/generate-json/JsonTextGenerationModel.d.ts +2 -2
  50. package/model-function/generate-json/generateJson.cjs +2 -30
  51. package/model-function/generate-json/generateJson.d.ts +2 -2
  52. package/model-function/generate-json/generateJson.js +2 -30
  53. package/model-function/generate-json/generateJsonOrText.cjs +2 -30
  54. package/model-function/generate-json/generateJsonOrText.d.ts +2 -2
  55. package/model-function/generate-json/generateJsonOrText.js +2 -30
  56. package/model-function/generate-text/TextGenerationEvent.d.ts +13 -15
  57. package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
  58. package/model-function/generate-text/TextStreamingEvent.d.ts +7 -22
  59. package/model-function/generate-text/generateText.cjs +2 -30
  60. package/model-function/generate-text/generateText.d.ts +2 -2
  61. package/model-function/generate-text/generateText.js +2 -30
  62. package/model-function/generate-text/streamText.cjs +52 -42
  63. package/model-function/generate-text/streamText.d.ts +5 -5
  64. package/model-function/generate-text/streamText.js +52 -42
  65. package/model-function/index.cjs +1 -1
  66. package/model-function/index.d.ts +1 -1
  67. package/model-function/index.js +1 -1
  68. package/model-function/synthesize-speech/SpeechSynthesisEvent.d.ts +14 -15
  69. package/model-function/synthesize-speech/SpeechSynthesisModel.d.ts +2 -2
  70. package/model-function/synthesize-speech/synthesizeSpeech.cjs +2 -30
  71. package/model-function/synthesize-speech/synthesizeSpeech.d.ts +2 -2
  72. package/model-function/synthesize-speech/synthesizeSpeech.js +2 -30
  73. package/model-function/transcribe-speech/TranscriptionEvent.d.ts +12 -16
  74. package/model-function/transcribe-speech/TranscriptionModel.d.ts +2 -2
  75. package/model-function/transcribe-speech/transcribe.cjs +2 -30
  76. package/model-function/transcribe-speech/transcribe.d.ts +2 -2
  77. package/model-function/transcribe-speech/transcribe.js +2 -30
  78. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
  79. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +6 -6
  80. package/model-provider/cohere/CohereTextGenerationModel.d.ts +4 -4
  81. package/model-provider/cohere/CohereTokenizer.d.ts +1 -1
  82. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts +2 -2
  83. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +3 -3
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
  85. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -3
  86. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +4 -4
  87. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +1 -1
  88. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
  89. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +3 -3
  90. package/model-provider/openai/OpenAITextGenerationModel.d.ts +4 -4
  91. package/model-provider/openai/OpenAITranscriptionModel.d.ts +3 -3
  92. package/model-provider/openai/chat/OpenAIChatModel.d.ts +5 -5
  93. package/model-provider/stability/StabilityImageGenerationModel.d.ts +3 -3
  94. package/package.json +1 -1
  95. package/prompt/PromptFormatTextGenerationModel.d.ts +3 -3
  96. package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.cjs +1 -0
  97. package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.d.ts +2 -2
  98. package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.js +1 -0
  99. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +2 -2
  100. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +2 -2
  101. package/text-chunk/split/SplitFunction.d.ts +3 -3
  102. package/text-chunk/upsertTextChunks.d.ts +2 -2
  103. package/tool/ExecuteToolEvent.d.ts +9 -20
  104. package/tool/Tool.d.ts +3 -3
  105. package/tool/WebSearchTool.cjs +25 -0
  106. package/tool/WebSearchTool.d.ts +57 -1
  107. package/tool/WebSearchTool.js +25 -0
  108. package/tool/executeTool.cjs +39 -31
  109. package/tool/executeTool.d.ts +5 -4
  110. package/tool/executeTool.js +39 -31
  111. package/tool/useTool.cjs +2 -6
  112. package/tool/useTool.d.ts +2 -2
  113. package/tool/useTool.js +2 -6
  114. package/tool/useToolOrGenerateText.cjs +1 -3
  115. package/tool/useToolOrGenerateText.d.ts +2 -2
  116. package/tool/useToolOrGenerateText.js +1 -3
  117. package/util/DurationMeasurement.cjs +6 -0
  118. package/util/DurationMeasurement.d.ts +1 -0
  119. package/util/DurationMeasurement.js +6 -0
  120. package/util/api/postToApi.cjs +8 -0
  121. package/util/api/postToApi.js +8 -0
  122. package/vector-index/VectorIndex.d.ts +1 -1
  123. package/vector-index/memory/MemoryVectorIndex.d.ts +1 -1
  124. package/vector-index/pinecone/PineconeVectorIndex.d.ts +1 -1
  125. package/model-function/FunctionOptions.d.ts +0 -6
  126. package/run/ConsoleLogger.d.ts +0 -6
  127. package/run/ConsoleLogger.js +0 -8
  128. package/run/IdMetadata.d.ts +0 -7
  129. package/run/RunFunction.d.ts +0 -9
  130. package/run/RunFunctionEvent.d.ts +0 -12
  131. package/run/RunFunctionEventSource.d.ts +0 -13
  132. package/run/RunFunctionObserver.cjs +0 -2
  133. package/run/RunFunctionObserver.d.ts +0 -5
  134. package/run/RunFunctionObserver.js +0 -1
  135. package/run/index.d.ts +0 -9
  136. package/run/index.js +0 -9
  137. /package/{model-function/FunctionOptions.cjs → core/FunctionEvent.cjs} +0 -0
  138. /package/{model-function/FunctionOptions.js → core/FunctionEvent.js} +0 -0
  139. /package/{run/IdMetadata.cjs → core/FunctionObserver.cjs} +0 -0
  140. /package/{run/IdMetadata.js → core/FunctionObserver.js} +0 -0
  141. /package/{run/Run.cjs → core/FunctionOptions.cjs} +0 -0
  142. /package/{run/Run.js → core/FunctionOptions.js} +0 -0
  143. /package/{run/RunFunction.cjs → core/Run.cjs} +0 -0
  144. /package/{run/RunFunction.js → core/Run.js} +0 -0
  145. /package/{run → core}/Vector.cjs +0 -0
  146. /package/{run → core}/Vector.d.ts +0 -0
  147. /package/{run → core}/Vector.js +0 -0
  148. /package/{run/RunFunctionEvent.cjs → model-function/ModelFunctionOptions.cjs} +0 -0
  149. /package/{run/RunFunctionEvent.js → model-function/ModelFunctionOptions.js} +0 -0
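Note: the dominant change in this release is a refactor of the former `run/` module into `core/`: `FunctionOptions` is split into a plain `FunctionOptions` (in `core/`) and a `ModelFunctionOptions` (in `model-function/`), and function observers can now be attached globally, per run, or per individual call. Below is a minimal usage sketch of call-level observers, assuming the 0.20-era root exports and the `generateText(model, prompt, options)` signature are unchanged, and assuming the observer interface exposes an `onFunctionEvent` callback (`core/FunctionObserver.d.ts` is not expanded in this diff):

```ts
import { generateText, OpenAITextGenerationModel } from "modelfusion";

// Assumed observer shape: a single onFunctionEvent callback that receives
// the new FunctionEvent objects (eventType, functionType, result, ...).
const logObserver = {
  onFunctionEvent(event: unknown) {
    console.log(JSON.stringify(event));
  },
};

async function main() {
  const text = await generateText(
    new OpenAITextGenerationModel({ model: "text-davinci-003" }),
    "Write a haiku about package diffs:",
    {
      functionId: "haiku-example", // shows up in the emitted events
      observers: [logObserver], // call-level observers (new in this release)
    }
  );
  console.log(text);
}

main().catch(console.error);
```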
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import z from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../../model-function/embed-text/TextEmbeddingModel.js";
  import { RetryFunction } from "../../util/api/RetryFunction.js";
  import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -23,8 +23,8 @@ export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTe
  readonly embeddingDimensions: number | undefined;
  private readonly tokenizer;
  tokenize(text: string): Promise<number[]>;
- callAPI(texts: Array<string>, options?: FunctionOptions<LlamaCppTextEmbeddingModelSettings>): Promise<LlamaCppTextEmbeddingResponse>;
- generateEmbeddingResponse(texts: string[], options?: FunctionOptions<LlamaCppTextEmbeddingModelSettings>): Promise<{
+ callAPI(texts: Array<string>, options?: ModelFunctionOptions<LlamaCppTextEmbeddingModelSettings>): Promise<LlamaCppTextEmbeddingResponse>;
+ generateEmbeddingResponse(texts: string[], options?: ModelFunctionOptions<LlamaCppTextEmbeddingModelSettings>): Promise<{
  embedding: number[];
  }>;
  extractEmbeddings(response: LlamaCppTextEmbeddingResponse): number[][];
package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import z from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { DeltaEvent } from "../../model-function/generate-text/DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "../../prompt/PromptFormat.js";
@@ -46,9 +46,9 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
  readonly tokenizer: LlamaCppTokenizer;
  callAPI<RESPONSE>(prompt: string, options: {
  responseFormat: LlamaCppTextGenerationResponseFormatType<RESPONSE>;
- } & FunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<RESPONSE>;
+ } & ModelFunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<RESPONSE>;
  countPromptTokens(prompt: string): Promise<number>;
- generateTextResponse(prompt: string, options?: FunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<{
+ generateTextResponse(prompt: string, options?: ModelFunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<{
  model: string;
  prompt: string;
  content: string;
@@ -98,7 +98,7 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
  truncated: boolean;
  }>;
  extractText(response: LlamaCppTextGenerationResponse): string;
- generateDeltaStreamResponse(prompt: string, options?: FunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<AsyncIterable<DeltaEvent<LlamaCppTextGenerationDelta>>>;
+ generateDeltaStreamResponse(prompt: string, options?: ModelFunctionOptions<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): Promise<AsyncIterable<DeltaEvent<LlamaCppTextGenerationDelta>>>;
  extractTextDelta(fullDelta: LlamaCppTextGenerationDelta): string | undefined;
  withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, LlamaCppTextGenerationResponse, LlamaCppTextGenerationDelta, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
  withSettings(additionalSettings: Partial<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
package/model-provider/llamacpp/LlamaCppTokenizer.d.ts CHANGED
@@ -1,6 +1,6 @@
  import z from "zod";
  import { BasicTokenizer } from "../../model-function/tokenize-text/Tokenizer.js";
- import { Run } from "../../run/Run.js";
+ import { Run } from "../../core/Run.js";
  import { RetryFunction } from "../../util/api/RetryFunction.js";
  import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
  export interface LlamaCppTokenizerSettings {
package/model-provider/openai/OpenAIImageGenerationModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { z } from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
  import { ResponseHandler } from "../../util/api/postToApi.js";
  import { OpenAIModelSettings } from "./OpenAIModelSettings.js";
@@ -32,10 +32,10 @@ export declare class OpenAIImageGenerationModel extends AbstractModel<OpenAIImag
  private get apiKey();
  callAPI<RESULT>(prompt: string, options: {
  responseFormat: OpenAIImageGenerationResponseFormatType<RESULT>;
- } & FunctionOptions<Partial<OpenAIImageGenerationCallSettings & OpenAIModelSettings & {
+ } & ModelFunctionOptions<Partial<OpenAIImageGenerationCallSettings & OpenAIModelSettings & {
  user?: string;
  }>>): Promise<RESULT>;
- generateImageResponse(prompt: string, options?: FunctionOptions<OpenAIImageGenerationSettings>): Promise<{
+ generateImageResponse(prompt: string, options?: ModelFunctionOptions<OpenAIImageGenerationSettings>): Promise<{
  data: {
  b64_json: string;
  }[];
package/model-provider/openai/OpenAITextEmbeddingModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import z from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../../model-function/embed-text/TextEmbeddingModel.js";
  import { RetryFunction } from "../../util/api/RetryFunction.js";
  import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -50,8 +50,8 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
  readonly contextWindowSize: number;
  private get apiKey();
  countTokens(input: string): Promise<number>;
- callAPI(text: string, options?: FunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<OpenAITextEmbeddingResponse>;
- generateEmbeddingResponse(texts: string[], options?: FunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<{
+ callAPI(text: string, options?: ModelFunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<OpenAITextEmbeddingResponse>;
+ generateEmbeddingResponse(texts: string[], options?: ModelFunctionOptions<OpenAITextEmbeddingModelSettings>): Promise<{
  object: "list";
  model: string;
  data: {
package/model-provider/openai/OpenAITextGenerationModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import z from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { DeltaEvent } from "../../model-function/generate-text/DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "../../prompt/PromptFormat.js";
@@ -117,10 +117,10 @@ export declare class OpenAITextGenerationModel extends AbstractModel<OpenAITextG
  countPromptTokens(input: string): Promise<number>;
  callAPI<RESULT>(prompt: string, options: {
  responseFormat: OpenAITextResponseFormatType<RESULT>;
- } & FunctionOptions<Partial<OpenAIImageGenerationCallSettings & OpenAIModelSettings & {
+ } & ModelFunctionOptions<Partial<OpenAIImageGenerationCallSettings & OpenAIModelSettings & {
  user?: string;
  }>>): Promise<RESULT>;
- generateTextResponse(prompt: string, options?: FunctionOptions<OpenAITextGenerationModelSettings>): Promise<{
+ generateTextResponse(prompt: string, options?: ModelFunctionOptions<OpenAITextGenerationModelSettings>): Promise<{
  object: "text_completion";
  model: string;
  id: string;
@@ -138,7 +138,7 @@ export declare class OpenAITextGenerationModel extends AbstractModel<OpenAITextG
  }[];
  }>;
  extractText(response: OpenAITextGenerationResponse): string;
- generateDeltaStreamResponse(prompt: string, options?: FunctionOptions<OpenAITextGenerationModelSettings>): Promise<AsyncIterable<DeltaEvent<OpenAITextGenerationDelta>>>;
+ generateDeltaStreamResponse(prompt: string, options?: ModelFunctionOptions<OpenAITextGenerationModelSettings>): Promise<AsyncIterable<DeltaEvent<OpenAITextGenerationDelta>>>;
  extractTextDelta(fullDelta: OpenAITextGenerationDelta): string | undefined;
  withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, OpenAITextGenerationResponse, OpenAITextGenerationDelta, OpenAITextGenerationModelSettings, this>;
  withSettings(additionalSettings: Partial<OpenAITextGenerationModelSettings>): this;
package/model-provider/openai/OpenAITranscriptionModel.d.ts CHANGED
@@ -1,7 +1,7 @@
  /// <reference types="node" resolution-mode="require"/>
  import z from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { TranscriptionModel, TranscriptionModelSettings } from "../../model-function/transcribe-speech/TranscriptionModel.js";
  import { RetryFunction } from "../../util/api/RetryFunction.js";
  import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -51,12 +51,12 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
  constructor(settings: OpenAITranscriptionModelSettings);
  readonly provider: "openai";
  get modelName(): "whisper-1";
- generateTranscriptionResponse(data: OpenAITranscriptionInput, options?: FunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): PromiseLike<OpenAITranscriptionVerboseJsonResponse>;
+ generateTranscriptionResponse(data: OpenAITranscriptionInput, options?: ModelFunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): PromiseLike<OpenAITranscriptionVerboseJsonResponse>;
  extractTranscriptionText(response: OpenAITranscriptionVerboseJsonResponse): string;
  private get apiKey();
  callAPI<RESULT>(data: OpenAITranscriptionInput, options: {
  responseFormat: OpenAITranscriptionResponseFormatType<RESULT>;
- } & FunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): Promise<RESULT>;
+ } & ModelFunctionOptions<Partial<OpenAITranscriptionModelSettings & OpenAIModelSettings>>): Promise<RESULT>;
  withSettings(additionalSettings: OpenAITranscriptionModelSettings): this;
  }
  declare const openAITranscriptionJsonSchema: z.ZodObject<{
package/model-provider/openai/chat/OpenAIChatModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import z from "zod";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../../model-function/ModelFunctionOptions.js";
  import { GenerateJsonModel } from "../../../model-function/generate-json/GenerateJsonModel.js";
  import { GenerateJsonOrTextModel } from "../../../model-function/generate-json/GenerateJsonOrTextModel.js";
  import { DeltaEvent } from "../../../model-function/generate-text/DeltaEvent.js";
@@ -131,10 +131,10 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
  countPromptTokens(messages: OpenAIChatMessage[]): Promise<number>;
  callAPI<RESULT>(messages: Array<OpenAIChatMessage>, options: {
  responseFormat: OpenAIChatResponseFormatType<RESULT>;
- } & FunctionOptions<Partial<OpenAIChatCallSettings & OpenAIModelSettings & {
+ } & ModelFunctionOptions<Partial<OpenAIChatCallSettings & OpenAIModelSettings & {
  user?: string;
  }>>): Promise<RESULT>;
- generateTextResponse(prompt: OpenAIChatMessage[], options?: FunctionOptions<OpenAIChatSettings>): Promise<{
+ generateTextResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<{
  object: "chat.completion";
  model: string;
  id: string;
@@ -159,7 +159,7 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
  }[];
  }>;
  extractText(response: OpenAIChatResponse): string;
- generateDeltaStreamResponse(prompt: OpenAIChatMessage[], options?: FunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
+ generateDeltaStreamResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
  extractTextDelta(fullDelta: OpenAIChatDelta): string | undefined;
  /**
  * JSON generation uses the OpenAI GPT function calling API.
@@ -168,7 +168,7 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
  *
  * @see https://platform.openai.com/docs/guides/gpt/function-calling
  */
- generateJsonResponse(prompt: OpenAIChatSingleFunctionPrompt<unknown> | OpenAIChatAutoFunctionPrompt<Array<OpenAIFunctionDescription<unknown>>>, options?: FunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
+ generateJsonResponse(prompt: OpenAIChatSingleFunctionPrompt<unknown> | OpenAIChatAutoFunctionPrompt<Array<OpenAIFunctionDescription<unknown>>>, options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
  extractJson(response: OpenAIChatResponse): unknown;
  withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextGenerationModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings, this>;
  withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
package/model-provider/stability/StabilityImageGenerationModel.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { z } from "zod";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
  import { RetryFunction } from "../../util/api/RetryFunction.js";
  import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
@@ -31,8 +31,8 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
  readonly provider: "stability";
  get modelName(): string;
  private get apiKey();
- callAPI(input: StabilityImageGenerationPrompt, options?: FunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
- generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: FunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
+ callAPI(input: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
+ generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
  artifacts: {
  seed: number;
  base64: string;
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "modelfusion",
  "description": "Build AI applications, chatbots, and agents with JavaScript and TypeScript.",
- "version": "0.20.1",
+ "version": "0.22.0",
  "author": "Lars Grammel",
  "license": "MIT",
  "keywords": [
package/prompt/PromptFormatTextGenerationModel.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
  import { DeltaEvent } from "../model-function/generate-text/DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "./PromptFormat.js";
@@ -14,9 +14,9 @@ export declare class PromptFormatTextGenerationModel<PROMPT, MODEL_PROMPT, RESPO
  get tokenizer(): MODEL["tokenizer"];
  get contextWindowSize(): MODEL["contextWindowSize"];
  get countPromptTokens(): MODEL["countPromptTokens"] extends undefined ? undefined : (prompt: PROMPT) => PromiseLike<number>;
- generateTextResponse(prompt: PROMPT, options?: FunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
+ generateTextResponse(prompt: PROMPT, options?: ModelFunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
  extractText(response: RESPONSE): string;
- get generateDeltaStreamResponse(): MODEL["generateDeltaStreamResponse"] extends undefined ? undefined : (prompt: PROMPT, options: FunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
+ get generateDeltaStreamResponse(): MODEL["generateDeltaStreamResponse"] extends undefined ? undefined : (prompt: PROMPT, options: ModelFunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
  get extractTextDelta(): MODEL["extractTextDelta"];
  withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, PROMPT>): PromptFormatTextGenerationModel<INPUT_PROMPT, PROMPT, RESPONSE, FULL_DELTA, SETTINGS, this>;
  withSettings(additionalSettings: Partial<SETTINGS>): this;
package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.cjs CHANGED
@@ -33,6 +33,7 @@ class SimilarTextChunksFromVectorIndexRetriever {
  if (options?.settings != null) {
  return this.withSettings(options.settings).retrieveTextChunks(query, {
  functionId: options.functionId,
+ observers: options.observers,
  run: options.run,
  });
  }
package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
  import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../model-function/embed-text/TextEmbeddingModel.js";
  import { TextChunk } from "./TextChunk.js";
  import { TextChunkRetriever, TextChunkRetrieverSettings } from "./retrieve-text-chunks/TextChunkRetriever.js";
@@ -15,6 +15,6 @@ export declare class SimilarTextChunksFromVectorIndexRetriever<CHUNK extends Tex
  vectorIndex: VectorIndex<CHUNK, INDEX>;
  embeddingModel: TextEmbeddingModel<unknown, SETTINGS>;
  } & SimilarTextChunksFromVectorIndexRetrieverSettings);
- retrieveTextChunks(query: string, options?: FunctionOptions<TextChunkRetrieverSettings>): Promise<CHUNK[]>;
+ retrieveTextChunks(query: string, options?: ModelFunctionOptions<TextChunkRetrieverSettings>): Promise<CHUNK[]>;
  withSettings(additionalSettings: Partial<SimilarTextChunksFromVectorIndexRetrieverSettings>): this;
  }
package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.js CHANGED
@@ -30,6 +30,7 @@ export class SimilarTextChunksFromVectorIndexRetriever {
  if (options?.settings != null) {
  return this.withSettings(options.settings).retrieveTextChunks(query, {
  functionId: options.functionId,
+ observers: options.observers,
  run: options.run,
  });
  }
package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts CHANGED
@@ -1,8 +1,8 @@
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { TextChunk } from "../TextChunk.js";
  export interface TextChunkRetrieverSettings {
  }
  export interface TextChunkRetriever<CHUNK extends TextChunk, QUERY, SETTINGS extends TextChunkRetrieverSettings> {
- retrieveTextChunks(query: QUERY, options?: FunctionOptions<TextChunkRetrieverSettings>): Promise<CHUNK[]>;
+ retrieveTextChunks(query: QUERY, options?: ModelFunctionOptions<TextChunkRetrieverSettings>): Promise<CHUNK[]>;
  withSettings(additionalSettings: Partial<SETTINGS>): this;
  }
package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts CHANGED
@@ -1,6 +1,6 @@
- import { FunctionOptions } from "../../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
  import { TextChunk } from "../TextChunk.js";
  import { TextChunkRetriever, TextChunkRetrieverSettings } from "./TextChunkRetriever.js";
- export declare function retrieveTextChunks<CHUNK extends TextChunk, QUERY, SETTINGS extends TextChunkRetrieverSettings>(retriever: TextChunkRetriever<CHUNK, QUERY, SETTINGS>, query: QUERY, options?: FunctionOptions<SETTINGS>): Promise<{
+ export declare function retrieveTextChunks<CHUNK extends TextChunk, QUERY, SETTINGS extends TextChunkRetrieverSettings>(retriever: TextChunkRetriever<CHUNK, QUERY, SETTINGS>, query: QUERY, options?: ModelFunctionOptions<SETTINGS>): Promise<{
  chunks: CHUNK[];
  }>;
package/text-chunk/split/SplitFunction.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { RunFunction } from "../../run/RunFunction.js";
- export type SplitFunction = RunFunction<{
+ import { FunctionOptions } from "../../core/FunctionOptions.js";
+ export type SplitFunction = (input: {
  text: string;
- }, Array<string>>;
+ }, options?: FunctionOptions) => PromiseLike<Array<string>>;
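Note: `SplitFunction` is now a plain function type that receives `{ text }` plus optional `FunctionOptions`, instead of being expressed through the removed `RunFunction` type. A minimal sketch of a custom splitter that satisfies the new signature; the import path assumes `FunctionOptions` is re-exported from the package root (it now lives under `core/`), and the blank-line splitting logic is purely illustrative:

```ts
import type { FunctionOptions } from "modelfusion";

// Splits on blank lines. Any async work (e.g. calling a tokenizer) also
// satisfies the PromiseLike<Array<string>> return type.
const splitOnBlankLines = async (
  input: { text: string },
  _options?: FunctionOptions
): Promise<Array<string>> =>
  input.text
    .split(/\n\s*\n/)
    .map((part) => part.trim())
    .filter((part) => part.length > 0);
```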
package/text-chunk/upsertTextChunks.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../model-function/FunctionOptions.js";
+ import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
  import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../model-function/embed-text/TextEmbeddingModel.js";
  import { TextChunk } from "./TextChunk.js";
  import { VectorIndex } from "../vector-index/VectorIndex.js";
@@ -8,4 +8,4 @@ export declare function upsertTextChunks<CHUNK extends TextChunk, SETTINGS exten
  generateId?: () => string;
  chunks: CHUNK[];
  ids?: Array<string | undefined>;
- }, options?: FunctionOptions<SETTINGS>): Promise<void>;
+ }, options?: ModelFunctionOptions<SETTINGS>): Promise<void>;
package/tool/ExecuteToolEvent.d.ts CHANGED
@@ -1,22 +1,11 @@
- import { RunFunctionFinishedEventMetadata, RunFunctionStartedEventMetadata } from "../run/RunFunctionEvent.js";
- import { Tool } from "./Tool.js";
- export type ExecuteToolStartedEvent = {
- type: "execute-tool-started";
- metadata: RunFunctionStartedEventMetadata;
- tool: Tool<string, unknown, unknown>;
+ import { BaseFunctionFinishedEvent, BaseFunctionStartedEvent } from "../core/FunctionEvent.js";
+ export interface ExecuteToolStartedEvent extends BaseFunctionStartedEvent {
+ functionType: "execute-tool";
+ toolName: string;
  input: unknown;
- };
- export type ExecuteToolFinishedEvent = {
- type: "execute-tool-finished";
- metadata: RunFunctionFinishedEventMetadata;
- tool: Tool<string, unknown, unknown>;
+ }
+ export interface ExecuteToolFinishedEvent extends BaseFunctionFinishedEvent {
+ functionType: "execute-tool";
+ toolName: string;
  input: unknown;
- } & ({
- status: "success";
- output: unknown;
- } | {
- status: "failure";
- error: unknown;
- } | {
- status: "abort";
- });
+ }
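Note: tool events now follow the shared `FunctionEvent` shape — `functionType: "execute-tool"`, an `eventType` of `"started"` or `"finished"`, a `toolName`, and (on finish) a `result` whose `status` is `"success"`, `"error"`, or `"abort"` (see the `executeTool.cjs` hunk at the end of this diff). A hedged sketch of an observer reacting to these events; the `onFunctionEvent` callback name is assumed, since `core/FunctionObserver.d.ts` is not expanded here:

```ts
// Assumed FunctionObserver shape; the event fields below match what
// executeTool.cjs emits in this diff.
const toolObserver = {
  onFunctionEvent(event: any) {
    if (event.functionType !== "execute-tool") return;

    if (event.eventType === "started") {
      console.log(`tool ${event.toolName} started`, event.input);
    } else if (event.eventType === "finished") {
      // result.status is "success", "error", or "abort"
      console.log(`tool ${event.toolName} finished: ${event.result.status}`);
    }
  },
};
```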
package/tool/Tool.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { z } from "zod";
  import { SchemaDefinition } from "../model-function/generate-json/SchemaDefinition.js";
- import { RunFunction } from "../run/RunFunction.js";
+ import { FunctionOptions } from "../core/FunctionOptions.js";
  /**
  * A tool is a function with a name, description and defined inputs that can be used
  * by agents and chatbots.
@@ -28,13 +28,13 @@ export declare class Tool<NAME extends string, INPUT, OUTPUT> {
  /**
  * The actual execution function of the tool.
  */
- readonly execute: RunFunction<INPUT, OUTPUT>;
+ readonly execute: (input: INPUT, options?: FunctionOptions) => PromiseLike<OUTPUT>;
  constructor({ name, description, inputSchema, outputSchema, execute, }: {
  name: NAME;
  description: string;
  inputSchema: z.ZodSchema<INPUT>;
  outputSchema?: z.ZodSchema<OUTPUT>;
- execute(input: INPUT): Promise<OUTPUT>;
+ execute(input: INPUT, options?: FunctionOptions): PromiseLike<OUTPUT>;
  });
  /**
  * Provdes a schema definition with the name, description and schema of the input.
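Note: `Tool.execute` now receives the optional `FunctionOptions` as a second argument and may return a `PromiseLike`. A minimal sketch of a tool written against the new signature; the calculator logic and the root-level `Tool` import are illustrative assumptions:

```ts
import { z } from "zod";
import { Tool } from "modelfusion";

const calculator = new Tool({
  name: "add" as const,
  description: "Adds two numbers.",
  inputSchema: z.object({
    a: z.number(),
    b: z.number(),
  }),
  // options (run, functionId, observers, ...) could be forwarded to nested calls
  execute: async ({ a, b }, _options) => a + b,
});
```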
package/tool/WebSearchTool.cjs CHANGED
@@ -16,6 +16,31 @@ const createInputSchema = (description) =>
  zod_1.z.object({
  query: zod_1.z.string().describe(description),
  });
+ /**
+ * A tool for searching the web.
+ *
+ * The input schema takes a query string.
+ * ```ts
+ * {
+ * query: "How many people live in Berlin?"
+ * }
+ * ```
+ *
+ * The output schema is an array of search results with title, link and snippet.
+ * ```ts
+ * {
+ * results:
+ * [
+ * {
+ * title: "Berlin - Wikipedia",
+ * link: "https://en.wikipedia.org/wiki/Berlin",
+ * snippet: "Berlin is the capital and largest city of Germany by...",
+ * },
+ * ...
+ * ]
+ * }
+ * ```
+ */
  class WebSearchTool extends Tool_js_1.Tool {
  constructor({ name, description, queryDescription = "Search query", execute, }) {
  super({
package/tool/WebSearchTool.d.ts CHANGED
@@ -1,4 +1,33 @@
+ import { z } from "zod";
+ import { FunctionOptions } from "../core/FunctionOptions.js";
  import { Tool } from "./Tool.js";
+ declare const OUTPUT_SCHEMA: z.ZodObject<{
+ results: z.ZodArray<z.ZodObject<{
+ title: z.ZodString;
+ link: z.ZodString;
+ snippet: z.ZodString;
+ }, "strip", z.ZodTypeAny, {
+ link: string;
+ title: string;
+ snippet: string;
+ }, {
+ link: string;
+ title: string;
+ snippet: string;
+ }>, "many">;
+ }, "strip", z.ZodTypeAny, {
+ results: {
+ link: string;
+ title: string;
+ snippet: string;
+ }[];
+ }, {
+ results: {
+ link: string;
+ title: string;
+ snippet: string;
+ }[];
+ }>;
  export type WebSearchToolInput = {
  query: string;
  };
@@ -9,11 +38,38 @@ export type WebSearchToolOutput = {
  snippet: string;
  }[];
  };
+ /**
+ * A tool for searching the web.
+ *
+ * The input schema takes a query string.
+ * ```ts
+ * {
+ * query: "How many people live in Berlin?"
+ * }
+ * ```
+ *
+ * The output schema is an array of search results with title, link and snippet.
+ * ```ts
+ * {
+ * results:
+ * [
+ * {
+ * title: "Berlin - Wikipedia",
+ * link: "https://en.wikipedia.org/wiki/Berlin",
+ * snippet: "Berlin is the capital and largest city of Germany by...",
+ * },
+ * ...
+ * ]
+ * }
+ * ```
+ */
  export declare class WebSearchTool<NAME extends string> extends Tool<NAME, WebSearchToolInput, WebSearchToolOutput> {
+ readonly outputSchema: typeof OUTPUT_SCHEMA;
  constructor({ name, description, queryDescription, execute, }: {
  name: NAME;
  description: string;
  queryDescription?: string;
- execute(input: WebSearchToolInput): Promise<WebSearchToolOutput>;
+ execute(input: WebSearchToolInput, options?: FunctionOptions): PromiseLike<WebSearchToolOutput>;
  });
  }
+ export {};
package/tool/WebSearchTool.js CHANGED
@@ -13,6 +13,31 @@ const createInputSchema = (description) =>
  z.object({
  query: z.string().describe(description),
  });
+ /**
+ * A tool for searching the web.
+ *
+ * The input schema takes a query string.
+ * ```ts
+ * {
+ * query: "How many people live in Berlin?"
+ * }
+ * ```
+ *
+ * The output schema is an array of search results with title, link and snippet.
+ * ```ts
+ * {
+ * results:
+ * [
+ * {
+ * title: "Berlin - Wikipedia",
+ * link: "https://en.wikipedia.org/wiki/Berlin",
+ * snippet: "Berlin is the capital and largest city of Germany by...",
+ * },
+ * ...
+ * ]
+ * }
+ * ```
+ */
  export class WebSearchTool extends Tool {
  constructor({ name, description, queryDescription = "Search query", execute, }) {
  super({
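Note: `WebSearchTool` now carries a typed Zod output schema and forwards `FunctionOptions` into its `execute` function. A hedged sketch of wiring it to a search backend; `searchMyIndex` is a hypothetical helper standing in for whatever search API you actually call, and the root-level import is assumed:

```ts
import { WebSearchTool } from "modelfusion";

// Hypothetical search backend; replace with a real API call.
declare function searchMyIndex(
  query: string
): Promise<Array<{ title: string; link: string; snippet: string }>>;

const webSearch = new WebSearchTool({
  name: "search_web" as const,
  description: "Search the web for up-to-date information.",
  // queryDescription defaults to "Search query" (see the constructor above)
  execute: async ({ query }, _options) => ({
    results: await searchMyIndex(query),
  }),
});
```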
package/tool/executeTool.cjs CHANGED
@@ -2,7 +2,8 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.executeTool = exports.ExecuteToolPromise = void 0;
  const nanoid_1 = require("nanoid");
- const RunFunctionEventSource_js_1 = require("../run/RunFunctionEventSource.cjs");
+ const FunctionEventSource_js_1 = require("../core/FunctionEventSource.cjs");
+ const GlobalFunctionObservers_js_1 = require("../core/GlobalFunctionObservers.cjs");
  const DurationMeasurement_js_1 = require("../util/DurationMeasurement.cjs");
  const AbortError_js_1 = require("../util/api/AbortError.cjs");
  const runSafe_js_1 = require("../util/runSafe.cjs");
@@ -52,48 +53,56 @@ exports.executeTool = executeTool;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  async function doExecuteTool(tool, input, options) {
  const run = options?.run;
- const eventSource = new RunFunctionEventSource_js_1.RunFunctionEventSource({
- observers: run?.observers ?? [],
+ const eventSource = new FunctionEventSource_js_1.FunctionEventSource({
+ observers: [
+ ...(0, GlobalFunctionObservers_js_1.getGlobalFunctionObservers)(),
+ ...(run?.observers ?? []),
+ ...(options?.observers ?? []),
+ ],
  errorHandler: run?.errorHandler,
  });
  const durationMeasurement = (0, DurationMeasurement_js_1.startDurationMeasurement)();
- const startMetadata = {
+ const metadata = {
+ functionType: "execute-tool",
  callId: `call-${(0, nanoid_1.nanoid)()}`,
  runId: run?.runId,
  sessionId: run?.sessionId,
  userId: run?.userId,
  functionId: options?.functionId,
- startEpochSeconds: durationMeasurement.startEpochSeconds,
- };
- eventSource.notifyRunFunctionStarted({
- type: "execute-tool-started",
- metadata: startMetadata,
- tool: tool,
+ toolName: tool.name,
  input,
+ };
+ eventSource.notify({
+ ...metadata,
+ eventType: "started",
+ timestamp: durationMeasurement.startDate,
+ startTimestamp: durationMeasurement.startDate,
  });
  const result = await (0, runSafe_js_1.runSafe)(() => tool.execute(input, options));
  const finishMetadata = {
- ...startMetadata,
+ ...metadata,
+ eventType: "finished",
+ timestamp: new Date(),
+ startTimestamp: durationMeasurement.startDate,
+ finishTimestamp: new Date(),
  durationInMs: durationMeasurement.durationInMs,
  };
  if (!result.ok) {
  if (result.isAborted) {
- eventSource.notifyRunFunctionFinished({
- type: "execute-tool-finished",
- status: "abort",
- metadata: finishMetadata,
- tool: tool,
- input,
+ eventSource.notify({
+ ...finishMetadata,
+ result: {
+ status: "abort",
+ },
  });
  throw new AbortError_js_1.AbortError();
  }
- eventSource.notifyRunFunctionFinished({
- type: "execute-tool-finished",
- status: "failure",
- metadata: finishMetadata,
- tool: tool,
- input,
- error: result.error,
+ eventSource.notify({
+ ...finishMetadata,
+ result: {
+ status: "error",
+ error: result.error,
+ },
  });
  throw new ToolExecutionError_js_1.ToolExecutionError({
  toolName: tool.name,
@@ -104,13 +113,12 @@ async function doExecuteTool(tool, input, options) {
  });
  }
  const output = result.output;
- eventSource.notifyRunFunctionFinished({
- type: "execute-tool-finished",
- status: "success",
- metadata: finishMetadata,
- tool: tool,
- input,
- output,
+ eventSource.notify({
+ ...finishMetadata,
+ result: {
+ status: "success",
+ output,
+ },
  });
  return {
  output,