modelfusion 0.20.1 → 0.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. package/README.md +5 -7
  2. package/composed-function/summarize/SummarizationFunction.d.ts +3 -3
  3. package/composed-function/summarize/summarizeRecursively.d.ts +1 -1
  4. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +1 -1
  5. package/{run → core}/ConsoleLogger.cjs +1 -4
  6. package/core/ConsoleLogger.d.ts +5 -0
  7. package/core/ConsoleLogger.js +5 -0
  8. package/{run → core}/DefaultRun.cjs +2 -5
  9. package/{run → core}/DefaultRun.d.ts +5 -5
  10. package/{run → core}/DefaultRun.js +2 -5
  11. package/core/FunctionEvent.d.ts +75 -0
  12. package/{run/RunFunctionEventSource.js → core/FunctionEventSource.cjs} +7 -13
  13. package/core/FunctionEventSource.d.ts +12 -0
  14. package/{run/RunFunctionEventSource.cjs → core/FunctionEventSource.js} +3 -17
  15. package/core/FunctionObserver.d.ts +7 -0
  16. package/core/FunctionOptions.d.ts +19 -0
  17. package/core/GlobalFunctionObservers.cjs +12 -0
  18. package/core/GlobalFunctionObservers.d.ts +3 -0
  19. package/core/GlobalFunctionObservers.js +7 -0
  20. package/{run → core}/Run.d.ts +2 -2
  21. package/{run → core}/index.cjs +5 -5
  22. package/core/index.d.ts +9 -0
  23. package/core/index.js +9 -0
  24. package/index.cjs +1 -1
  25. package/index.d.ts +1 -1
  26. package/index.js +1 -1
  27. package/model-function/Model.d.ts +5 -2
  28. package/model-function/ModelCallEvent.d.ts +20 -5
  29. package/model-function/ModelFunctionOptions.d.ts +4 -0
  30. package/model-function/SuccessfulModelCall.cjs +6 -16
  31. package/model-function/SuccessfulModelCall.d.ts +2 -2
  32. package/model-function/SuccessfulModelCall.js +6 -16
  33. package/model-function/embed-text/TextEmbeddingEvent.d.ts +15 -17
  34. package/model-function/embed-text/TextEmbeddingModel.d.ts +3 -3
  35. package/model-function/embed-text/embedText.cjs +6 -67
  36. package/model-function/embed-text/embedText.d.ts +4 -4
  37. package/model-function/embed-text/embedText.js +6 -67
  38. package/model-function/executeCall.cjs +49 -14
  39. package/model-function/executeCall.d.ts +9 -10
  40. package/model-function/executeCall.js +49 -14
  41. package/model-function/generate-image/ImageGenerationEvent.d.ts +12 -16
  42. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  43. package/model-function/generate-image/generateImage.cjs +2 -30
  44. package/model-function/generate-image/generateImage.d.ts +2 -2
  45. package/model-function/generate-image/generateImage.js +2 -30
  46. package/model-function/generate-json/GenerateJsonModel.d.ts +2 -2
  47. package/model-function/generate-json/GenerateJsonOrTextModel.d.ts +2 -2
  48. package/model-function/generate-json/JsonGenerationEvent.d.ts +6 -21
  49. package/model-function/generate-json/JsonTextGenerationModel.d.ts +2 -2
  50. package/model-function/generate-json/generateJson.cjs +2 -30
  51. package/model-function/generate-json/generateJson.d.ts +2 -2
  52. package/model-function/generate-json/generateJson.js +2 -30
  53. package/model-function/generate-json/generateJsonOrText.cjs +2 -30
  54. package/model-function/generate-json/generateJsonOrText.d.ts +2 -2
  55. package/model-function/generate-json/generateJsonOrText.js +2 -30
  56. package/model-function/generate-text/TextGenerationEvent.d.ts +13 -15
  57. package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
  58. package/model-function/generate-text/TextStreamingEvent.d.ts +7 -22
  59. package/model-function/generate-text/generateText.cjs +2 -30
  60. package/model-function/generate-text/generateText.d.ts +2 -2
  61. package/model-function/generate-text/generateText.js +2 -30
  62. package/model-function/generate-text/streamText.cjs +52 -42
  63. package/model-function/generate-text/streamText.d.ts +5 -5
  64. package/model-function/generate-text/streamText.js +52 -42
  65. package/model-function/index.cjs +1 -1
  66. package/model-function/index.d.ts +1 -1
  67. package/model-function/index.js +1 -1
  68. package/model-function/synthesize-speech/SpeechSynthesisEvent.d.ts +14 -15
  69. package/model-function/synthesize-speech/SpeechSynthesisModel.d.ts +2 -2
  70. package/model-function/synthesize-speech/synthesizeSpeech.cjs +2 -30
  71. package/model-function/synthesize-speech/synthesizeSpeech.d.ts +2 -2
  72. package/model-function/synthesize-speech/synthesizeSpeech.js +2 -30
  73. package/model-function/transcribe-speech/TranscriptionEvent.d.ts +12 -16
  74. package/model-function/transcribe-speech/TranscriptionModel.d.ts +2 -2
  75. package/model-function/transcribe-speech/transcribe.cjs +2 -30
  76. package/model-function/transcribe-speech/transcribe.d.ts +2 -2
  77. package/model-function/transcribe-speech/transcribe.js +2 -30
  78. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
  79. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +6 -6
  80. package/model-provider/cohere/CohereTextGenerationModel.d.ts +4 -4
  81. package/model-provider/cohere/CohereTokenizer.d.ts +1 -1
  82. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts +2 -2
  83. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +3 -3
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
  85. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -3
  86. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +4 -4
  87. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +1 -1
  88. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
  89. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +3 -3
  90. package/model-provider/openai/OpenAITextGenerationModel.d.ts +4 -4
  91. package/model-provider/openai/OpenAITranscriptionModel.d.ts +3 -3
  92. package/model-provider/openai/chat/OpenAIChatModel.d.ts +5 -5
  93. package/model-provider/stability/StabilityImageGenerationModel.d.ts +3 -3
  94. package/package.json +1 -1
  95. package/prompt/PromptFormatTextGenerationModel.d.ts +3 -3
  96. package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.cjs +1 -0
  97. package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.d.ts +2 -2
  98. package/text-chunk/SimilarTextChunksFromVectorIndexRetriever.js +1 -0
  99. package/text-chunk/retrieve-text-chunks/TextChunkRetriever.d.ts +2 -2
  100. package/text-chunk/retrieve-text-chunks/retrieveTextChunks.d.ts +2 -2
  101. package/text-chunk/split/SplitFunction.d.ts +3 -3
  102. package/text-chunk/upsertTextChunks.d.ts +2 -2
  103. package/tool/ExecuteToolEvent.d.ts +9 -20
  104. package/tool/Tool.d.ts +3 -3
  105. package/tool/WebSearchTool.cjs +25 -0
  106. package/tool/WebSearchTool.d.ts +57 -1
  107. package/tool/WebSearchTool.js +25 -0
  108. package/tool/executeTool.cjs +39 -31
  109. package/tool/executeTool.d.ts +5 -4
  110. package/tool/executeTool.js +39 -31
  111. package/tool/useTool.cjs +2 -6
  112. package/tool/useTool.d.ts +2 -2
  113. package/tool/useTool.js +2 -6
  114. package/tool/useToolOrGenerateText.cjs +1 -3
  115. package/tool/useToolOrGenerateText.d.ts +2 -2
  116. package/tool/useToolOrGenerateText.js +1 -3
  117. package/util/DurationMeasurement.cjs +6 -0
  118. package/util/DurationMeasurement.d.ts +1 -0
  119. package/util/DurationMeasurement.js +6 -0
  120. package/util/api/postToApi.cjs +8 -0
  121. package/util/api/postToApi.js +8 -0
  122. package/vector-index/VectorIndex.d.ts +1 -1
  123. package/vector-index/memory/MemoryVectorIndex.d.ts +1 -1
  124. package/vector-index/pinecone/PineconeVectorIndex.d.ts +1 -1
  125. package/model-function/FunctionOptions.d.ts +0 -6
  126. package/run/ConsoleLogger.d.ts +0 -6
  127. package/run/ConsoleLogger.js +0 -8
  128. package/run/IdMetadata.d.ts +0 -7
  129. package/run/RunFunction.d.ts +0 -9
  130. package/run/RunFunctionEvent.d.ts +0 -12
  131. package/run/RunFunctionEventSource.d.ts +0 -13
  132. package/run/RunFunctionObserver.cjs +0 -2
  133. package/run/RunFunctionObserver.d.ts +0 -5
  134. package/run/RunFunctionObserver.js +0 -1
  135. package/run/index.d.ts +0 -9
  136. package/run/index.js +0 -9
  137. /package/{model-function/FunctionOptions.cjs → core/FunctionEvent.cjs} +0 -0
  138. /package/{model-function/FunctionOptions.js → core/FunctionEvent.js} +0 -0
  139. /package/{run/IdMetadata.cjs → core/FunctionObserver.cjs} +0 -0
  140. /package/{run/IdMetadata.js → core/FunctionObserver.js} +0 -0
  141. /package/{run/Run.cjs → core/FunctionOptions.cjs} +0 -0
  142. /package/{run/Run.js → core/FunctionOptions.js} +0 -0
  143. /package/{run/RunFunction.cjs → core/Run.cjs} +0 -0
  144. /package/{run/RunFunction.js → core/Run.js} +0 -0
  145. /package/{run → core}/Vector.cjs +0 -0
  146. /package/{run → core}/Vector.d.ts +0 -0
  147. /package/{run → core}/Vector.js +0 -0
  148. /package/{run/RunFunctionEvent.cjs → model-function/ModelFunctionOptions.cjs} +0 -0
  149. /package/{run/RunFunctionEvent.js → model-function/ModelFunctionOptions.js} +0 -0
package/model-function/generate-json/JsonGenerationEvent.d.ts
@@ -1,22 +1,7 @@
- import { ModelCallFinishedEventMetadata, ModelCallStartedEventMetadata } from "../ModelCallEvent.js";
- export type JsonGenerationStartedEvent = {
- type: "json-generation-started" | "json-or-text-generation-started";
- metadata: ModelCallStartedEventMetadata;
- settings: unknown;
- prompt: unknown;
+ import { BaseModelCallFinishedEvent, BaseModelCallStartedEvent } from "../ModelCallEvent.js";
+ export interface JsonGenerationStartedEvent extends BaseModelCallStartedEvent {
+ functionType: "json-generation" | "json-or-text-generation";
+ }
+ export type JsonGenerationFinishedEvent = BaseModelCallFinishedEvent & {
+ functionType: "json-generation" | "json-or-text-generation";
  };
- export type JsonGenerationFinishedEvent = {
- type: "json-generation-finished" | "json-or-text-generation-finished";
- metadata: ModelCallFinishedEventMetadata;
- settings: unknown;
- prompt: unknown;
- } & ({
- status: "success";
- response: unknown;
- generatedJson: unknown;
- } | {
- status: "failure";
- error: unknown;
- } | {
- status: "abort";
- });
package/model-function/generate-json/JsonTextGenerationModel.d.ts
@@ -2,7 +2,7 @@ import { TextGenerationModel, TextGenerationModelSettings } from "../generate-te
  import { SchemaDefinition } from "./SchemaDefinition.js";
  import { InstructionWithSchema } from "./InstructionWithSchemaPrompt.js";
  import { GenerateJsonModel } from "./GenerateJsonModel.js";
- import { FunctionOptions } from "../FunctionOptions.js";
+ import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
  export type JsonTextPromptFormat = {
  createPrompt: (prompt: {
  instruction: string;
@@ -19,7 +19,7 @@ export declare class JsonTextGenerationModel<SETTINGS extends TextGenerationMode
  });
  get modelInformation(): import("../ModelInformation.js").ModelInformation;
  get settings(): SETTINGS;
- generateJsonResponse(prompt: InstructionWithSchema<string, unknown>, options?: FunctionOptions<SETTINGS> | undefined): Promise<string>;
+ generateJsonResponse(prompt: InstructionWithSchema<string, unknown>, options?: ModelFunctionOptions<SETTINGS> | undefined): Promise<string>;
  extractJson(response: string): unknown;
  withSettings(additionalSettings: Partial<SETTINGS>): this;
  }
package/model-function/generate-json/generateJson.cjs
@@ -6,6 +6,8 @@ const SchemaValidationError_js_1 = require("./SchemaValidationError.cjs");
  function generateJson(model, schemaDefinition, prompt, options) {
  const expandedPrompt = prompt(schemaDefinition);
  return (0, executeCall_js_1.executeCall)({
+ functionType: "json-generation",
+ input: expandedPrompt,
  model,
  options,
  generateResponse: (options) => model.generateJsonResponse(expandedPrompt, options),
@@ -21,36 +23,6 @@ function generateJson(model, schemaDefinition, prompt, options) {
  }
  return parseResult.data;
  },
- getStartEvent: (metadata, settings) => ({
- type: "json-generation-started",
- metadata,
- settings,
- prompt,
- }),
- getAbortEvent: (metadata, settings) => ({
- type: "json-generation-finished",
- status: "abort",
- metadata,
- settings,
- prompt,
- }),
- getFailureEvent: (metadata, settings, error) => ({
- type: "json-generation-finished",
- status: "failure",
- metadata,
- settings,
- prompt,
- error,
- }),
- getSuccessEvent: (metadata, settings, response, output) => ({
- type: "json-generation-finished",
- status: "success",
- metadata,
- settings,
- prompt,
- response,
- generatedJson: output,
- }),
  });
  }
  exports.generateJson = generateJson;
package/model-function/generate-json/generateJson.d.ts
@@ -1,5 +1,5 @@
- import { FunctionOptions } from "../FunctionOptions.js";
+ import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
  import { ModelFunctionPromise } from "../executeCall.js";
  import { GenerateJsonModel, GenerateJsonModelSettings } from "./GenerateJsonModel.js";
  import { SchemaDefinition } from "./SchemaDefinition.js";
- export declare function generateJson<STRUCTURE, PROMPT, RESPONSE, NAME extends string, SETTINGS extends GenerateJsonModelSettings>(model: GenerateJsonModel<PROMPT, RESPONSE, SETTINGS>, schemaDefinition: SchemaDefinition<NAME, STRUCTURE>, prompt: (schemaDefinition: SchemaDefinition<NAME, STRUCTURE>) => PROMPT, options?: FunctionOptions<SETTINGS>): ModelFunctionPromise<GenerateJsonModel<PROMPT, RESPONSE, SETTINGS>, STRUCTURE, RESPONSE>;
+ export declare function generateJson<STRUCTURE, PROMPT, RESPONSE, NAME extends string, SETTINGS extends GenerateJsonModelSettings>(model: GenerateJsonModel<PROMPT, RESPONSE, SETTINGS>, schemaDefinition: SchemaDefinition<NAME, STRUCTURE>, prompt: (schemaDefinition: SchemaDefinition<NAME, STRUCTURE>) => PROMPT, options?: ModelFunctionOptions<SETTINGS>): ModelFunctionPromise<GenerateJsonModel<PROMPT, RESPONSE, SETTINGS>, STRUCTURE, RESPONSE>;
package/model-function/generate-json/generateJson.js
@@ -3,6 +3,8 @@ import { SchemaValidationError } from "./SchemaValidationError.js";
  export function generateJson(model, schemaDefinition, prompt, options) {
  const expandedPrompt = prompt(schemaDefinition);
  return executeCall({
+ functionType: "json-generation",
+ input: expandedPrompt,
  model,
  options,
  generateResponse: (options) => model.generateJsonResponse(expandedPrompt, options),
@@ -18,35 +20,5 @@ export function generateJson(model, schemaDefinition, prompt, options) {
  }
  return parseResult.data;
  },
- getStartEvent: (metadata, settings) => ({
- type: "json-generation-started",
- metadata,
- settings,
- prompt,
- }),
- getAbortEvent: (metadata, settings) => ({
- type: "json-generation-finished",
- status: "abort",
- metadata,
- settings,
- prompt,
- }),
- getFailureEvent: (metadata, settings, error) => ({
- type: "json-generation-finished",
- status: "failure",
- metadata,
- settings,
- prompt,
- error,
- }),
- getSuccessEvent: (metadata, settings, response, output) => ({
- type: "json-generation-finished",
- status: "success",
- metadata,
- settings,
- prompt,
- response,
- generatedJson: output,
- }),
  });
  }
package/model-function/generate-json/generateJsonOrText.cjs
@@ -7,6 +7,8 @@ const SchemaValidationError_js_1 = require("./SchemaValidationError.cjs");
  function generateJsonOrText(model, schemaDefinitions, prompt, options) {
  const expandedPrompt = prompt(schemaDefinitions);
  return (0, executeCall_js_1.executeCall)({
+ functionType: "json-or-text-generation",
+ input: expandedPrompt,
  model,
  options,
  generateResponse: (options) => model.generateJsonResponse(expandedPrompt, options),
@@ -34,36 +36,6 @@ function generateJsonOrText(model, schemaDefinitions, prompt, options) {
  text: text, // text is string | null, which is part of the response for schema values
  };
  },
- getStartEvent: (metadata, settings) => ({
- type: "json-or-text-generation-started",
- metadata,
- settings,
- prompt,
- }),
- getAbortEvent: (metadata, settings) => ({
- type: "json-or-text-generation-finished",
- status: "abort",
- metadata,
- settings,
- prompt,
- }),
- getFailureEvent: (metadata, settings, error) => ({
- type: "json-or-text-generation-finished",
- status: "failure",
- metadata,
- settings,
- prompt,
- error,
- }),
- getSuccessEvent: (metadata, settings, response, output) => ({
- type: "json-or-text-generation-finished",
- status: "success",
- metadata,
- settings,
- prompt,
- response,
- generatedJson: output,
- }),
  });
  }
  exports.generateJsonOrText = generateJsonOrText;
package/model-function/generate-json/generateJsonOrText.d.ts
@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../FunctionOptions.js";
+ import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
  import { ModelFunctionPromise } from "../executeCall.js";
  import { GenerateJsonOrTextModel, GenerateJsonOrTextModelSettings, GenerateJsonOrTextPrompt } from "./GenerateJsonOrTextModel.js";
  import { SchemaDefinition } from "./SchemaDefinition.js";
@@ -14,7 +14,7 @@ type ToSchemaUnion<T> = {
  } : never;
  }[keyof T];
  type ToOutputValue<SCHEMAS extends SchemaDefinitionArray<SchemaDefinition<any, any>[]>> = ToSchemaUnion<ToSchemaDefinitionsMap<SCHEMAS>>;
- export declare function generateJsonOrText<SCHEMAS extends SchemaDefinition<any, any>[], PROMPT, RESPONSE, SETTINGS extends GenerateJsonOrTextModelSettings>(model: GenerateJsonOrTextModel<PROMPT, RESPONSE, SETTINGS>, schemaDefinitions: SCHEMAS, prompt: (schemaDefinitions: SCHEMAS) => PROMPT & GenerateJsonOrTextPrompt<RESPONSE>, options?: FunctionOptions<SETTINGS>): ModelFunctionPromise<GenerateJsonOrTextModel<PROMPT, RESPONSE, SETTINGS>, {
+ export declare function generateJsonOrText<SCHEMAS extends SchemaDefinition<any, any>[], PROMPT, RESPONSE, SETTINGS extends GenerateJsonOrTextModelSettings>(model: GenerateJsonOrTextModel<PROMPT, RESPONSE, SETTINGS>, schemaDefinitions: SCHEMAS, prompt: (schemaDefinitions: SCHEMAS) => PROMPT & GenerateJsonOrTextPrompt<RESPONSE>, options?: ModelFunctionOptions<SETTINGS>): ModelFunctionPromise<GenerateJsonOrTextModel<PROMPT, RESPONSE, SETTINGS>, {
  schema: null;
  value: null;
  text: string;
package/model-function/generate-json/generateJsonOrText.js
@@ -4,6 +4,8 @@ import { SchemaValidationError } from "./SchemaValidationError.js";
  export function generateJsonOrText(model, schemaDefinitions, prompt, options) {
  const expandedPrompt = prompt(schemaDefinitions);
  return executeCall({
+ functionType: "json-or-text-generation",
+ input: expandedPrompt,
  model,
  options,
  generateResponse: (options) => model.generateJsonResponse(expandedPrompt, options),
@@ -31,35 +33,5 @@ export function generateJsonOrText(model, schemaDefinitions, prompt, options) {
  text: text, // text is string | null, which is part of the response for schema values
  };
  },
- getStartEvent: (metadata, settings) => ({
- type: "json-or-text-generation-started",
- metadata,
- settings,
- prompt,
- }),
- getAbortEvent: (metadata, settings) => ({
- type: "json-or-text-generation-finished",
- status: "abort",
- metadata,
- settings,
- prompt,
- }),
- getFailureEvent: (metadata, settings, error) => ({
- type: "json-or-text-generation-finished",
- status: "failure",
- metadata,
- settings,
- prompt,
- error,
- }),
- getSuccessEvent: (metadata, settings, response, output) => ({
- type: "json-or-text-generation-finished",
- status: "success",
- metadata,
- settings,
- prompt,
- response,
- generatedJson: output,
- }),
  });
  }
package/model-function/generate-text/TextGenerationEvent.d.ts
@@ -1,22 +1,20 @@
- import { ModelCallFinishedEventMetadata, ModelCallStartedEventMetadata } from "../ModelCallEvent.js";
- export type TextGenerationStartedEvent = {
- type: "text-generation-started";
- metadata: ModelCallStartedEventMetadata;
- settings: unknown;
+ import { BaseModelCallFinishedEvent, BaseModelCallStartedEvent } from "../ModelCallEvent.js";
+ export interface TextGenerationStartedEvent extends BaseModelCallStartedEvent {
+ functionType: "text-generation";
  prompt: unknown;
- };
- export type TextGenerationFinishedEvent = {
- type: "text-generation-finished";
- metadata: ModelCallFinishedEventMetadata;
- settings: unknown;
- prompt: unknown;
- } & ({
+ }
+ export type TextGenerationFinishedEventResult = {
  status: "success";
  response: unknown;
- generatedText: string;
+ output: string;
  } | {
- status: "failure";
+ status: "error";
  error: unknown;
  } | {
  status: "abort";
- });
+ };
+ export interface TextGenerationFinishedEvent extends BaseModelCallFinishedEvent {
+ functionType: "text-generation";
+ prompt: unknown;
+ result: TextGenerationFinishedEventResult;
+ }
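The finished event now carries its outcome in a result field discriminated by status, with "failure" renamed to "error" and generatedText generalized to output. A minimal TypeScript sketch of consuming the new shape; the result type is mirrored locally from the declarations above rather than imported, since export paths are not part of this excerpt:

// Mirrors TextGenerationFinishedEventResult from the declarations above.
type TextGenerationFinishedEventResult =
  | { status: "success"; response: unknown; output: string }
  | { status: "error"; error: unknown }
  | { status: "abort" };

// Exhaustive narrowing on the new `status` discriminator.
function describeTextGenerationResult(
  result: TextGenerationFinishedEventResult
): string {
  switch (result.status) {
    case "success":
      return `generated ${result.output.length} characters`;
    case "error":
      return `failed: ${String(result.error)}`;
    case "abort":
      return "aborted";
  }
}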
package/model-function/generate-text/TextGenerationModel.d.ts
@@ -1,6 +1,6 @@
  import { PromptFormat } from "../../prompt/PromptFormat.js";
  import { PromptFormatTextGenerationModel } from "../../prompt/PromptFormatTextGenerationModel.js";
- import { FunctionOptions } from "../FunctionOptions.js";
+ import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
  import { Model, ModelSettings } from "../Model.js";
  import { BasicTokenizer, FullTokenizer } from "../tokenize-text/Tokenizer.js";
  import { DeltaEvent } from "./DeltaEvent.js";
@@ -28,12 +28,12 @@ export interface TextGenerationModel<PROMPT, RESPONSE, FULL_DELTA, SETTINGS exte
  * Optional. Implement if you have a tokenizer and want to count the number of tokens in a prompt.
  */
  readonly countPromptTokens: ((prompt: PROMPT) => PromiseLike<number>) | undefined;
- generateTextResponse(prompt: PROMPT, options?: FunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
+ generateTextResponse(prompt: PROMPT, options?: ModelFunctionOptions<SETTINGS>): PromiseLike<RESPONSE>;
  extractText(response: RESPONSE): string;
  /**
  * Optional. Implement for streaming support.
  */
- readonly generateDeltaStreamResponse: ((prompt: PROMPT, options: FunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>) | undefined;
+ readonly generateDeltaStreamResponse: ((prompt: PROMPT, options: ModelFunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>) | undefined;
  /**
  * Optional. Implement for streaming support.
  */
package/model-function/generate-text/TextStreamingEvent.d.ts
@@ -1,22 +1,7 @@
- import { ModelCallFinishedEventMetadata, ModelCallStartedEventMetadata } from "../ModelCallEvent.js";
- export type TextStreamingStartedEvent = {
- type: "text-streaming-started";
- metadata: ModelCallStartedEventMetadata;
- settings: unknown;
- prompt: unknown;
- };
- export type TextStreamingFinishedEvent = {
- type: "text-streaming-finished";
- metadata: ModelCallFinishedEventMetadata;
- settings: unknown;
- prompt: unknown;
- } & ({
- status: "success";
- response: unknown;
- generatedText: string;
- } | {
- status: "failure";
- error: unknown;
- } | {
- status: "abort";
- });
+ import { BaseModelCallFinishedEvent, BaseModelCallStartedEvent } from "../ModelCallEvent.js";
+ export interface TextStreamingStartedEvent extends BaseModelCallStartedEvent {
+ functionType: "text-streaming";
+ }
+ export interface TextStreamingFinishedEvent extends BaseModelCallFinishedEvent {
+ functionType: "text-streaming";
+ }
package/model-function/generate-text/generateText.cjs
@@ -18,6 +18,8 @@ function generateText(
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  model, prompt, options) {
  return (0, executeCall_js_1.executeCall)({
+ functionType: "text-generation",
+ input: prompt,
  model,
  options,
  generateResponse: (options) => model.generateTextResponse(prompt, options),
@@ -27,36 +29,6 @@ model, prompt, options) {
  ? model.extractText(result).trim()
  : model.extractText(result);
  },
- getStartEvent: (metadata, settings) => ({
- type: "text-generation-started",
- metadata,
- settings,
- prompt,
- }),
- getAbortEvent: (metadata, settings) => ({
- type: "text-generation-finished",
- status: "abort",
- metadata,
- settings,
- prompt,
- }),
- getFailureEvent: (metadata, settings, error) => ({
- type: "text-generation-finished",
- status: "failure",
- metadata,
- settings,
- prompt,
- error,
- }),
- getSuccessEvent: (metadata, settings, response, output) => ({
- type: "text-generation-finished",
- status: "success",
- metadata,
- settings,
- prompt,
- response,
- generatedText: output,
- }),
  });
  }
  exports.generateText = generateText;
package/model-function/generate-text/generateText.d.ts
@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../FunctionOptions.js";
+ import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
  import { ModelFunctionPromise } from "../executeCall.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";
  /**
@@ -13,4 +13,4 @@ import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerati
  * "Write a short story about a robot learning to love:\n\n"
  * );
  */
- export declare function generateText<PROMPT, RESPONSE, SETTINGS extends TextGenerationModelSettings>(model: TextGenerationModel<PROMPT, RESPONSE, any, SETTINGS>, prompt: PROMPT, options?: FunctionOptions<SETTINGS>): ModelFunctionPromise<TextGenerationModel<PROMPT, RESPONSE, any, SETTINGS>, string, RESPONSE>;
+ export declare function generateText<PROMPT, RESPONSE, SETTINGS extends TextGenerationModelSettings>(model: TextGenerationModel<PROMPT, RESPONSE, any, SETTINGS>, prompt: PROMPT, options?: ModelFunctionOptions<SETTINGS>): ModelFunctionPromise<TextGenerationModel<PROMPT, RESPONSE, any, SETTINGS>, string, RESPONSE>;
package/model-function/generate-text/generateText.js
@@ -15,6 +15,8 @@ export function generateText(
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  model, prompt, options) {
  return executeCall({
+ functionType: "text-generation",
+ input: prompt,
  model,
  options,
  generateResponse: (options) => model.generateTextResponse(prompt, options),
@@ -24,35 +26,5 @@ model, prompt, options) {
  ? model.extractText(result).trim()
  : model.extractText(result);
  },
- getStartEvent: (metadata, settings) => ({
- type: "text-generation-started",
- metadata,
- settings,
- prompt,
- }),
- getAbortEvent: (metadata, settings) => ({
- type: "text-generation-finished",
- status: "abort",
- metadata,
- settings,
- prompt,
- }),
- getFailureEvent: (metadata, settings, error) => ({
- type: "text-generation-finished",
- status: "failure",
- metadata,
- settings,
- prompt,
- error,
- }),
- getSuccessEvent: (metadata, settings, response, output) => ({
- type: "text-generation-finished",
- status: "success",
- metadata,
- settings,
- prompt,
- response,
- generatedText: output,
- }),
  });
  }
package/model-function/generate-text/streamText.cjs
@@ -2,7 +2,8 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.streamText = exports.StreamTextPromise = void 0;
  const nanoid_1 = require("nanoid");
- const RunFunctionEventSource_js_1 = require("../../run/RunFunctionEventSource.cjs");
+ const FunctionEventSource_js_1 = require("../../core/FunctionEventSource.cjs");
+ const GlobalFunctionObservers_js_1 = require("../../core/GlobalFunctionObservers.cjs");
  const DurationMeasurement_js_1 = require("../../util/DurationMeasurement.cjs");
  const AbortError_js_1 = require("../../util/api/AbortError.cjs");
  const runSafe_js_1 = require("../../util/runSafe.cjs");
@@ -50,13 +51,19 @@ async function doStreamText(model, prompt, options) {
  model = model.withSettings(options.settings);
  options = {
  functionId: options.functionId,
+ observers: options.observers,
  run: options.run,
  };
  }
  const run = options?.run;
  const settings = model.settings;
- const eventSource = new RunFunctionEventSource_js_1.RunFunctionEventSource({
- observers: [...(settings.observers ?? []), ...(run?.observers ?? [])],
+ const eventSource = new FunctionEventSource_js_1.FunctionEventSource({
+ observers: [
+ ...(0, GlobalFunctionObservers_js_1.getGlobalFunctionObservers)(),
+ ...(settings.observers ?? []),
+ ...(run?.observers ?? []),
+ ...(options?.observers ?? []),
+ ],
  errorHandler: run?.errorHandler,
  });
  const durationMeasurement = (0, DurationMeasurement_js_1.startDurationMeasurement)();
@@ -67,13 +74,15 @@ async function doStreamText(model, prompt, options) {
  userId: run?.userId,
  functionId: options?.functionId,
  model: model.modelInformation,
- startEpochSeconds: durationMeasurement.startEpochSeconds,
- };
- eventSource.notifyRunFunctionStarted({
- type: "text-streaming-started",
- metadata: startMetadata,
+ functionType: "text-streaming",
+ input: prompt,
  settings,
- prompt,
+ timestamp: durationMeasurement.startDate,
+ startTimestamp: durationMeasurement.startDate,
+ };
+ eventSource.notify({
+ ...startMetadata,
+ eventType: "started",
  });
  const result = await (0, runSafe_js_1.runSafe)(async () => (0, extractTextDeltas_js_1.extractTextDeltas)({
  deltaIterable: await model.generateDeltaStreamResponse(prompt, {
@@ -85,63 +94,64 @@ async function doStreamText(model, prompt, options) {
  onDone: (fullText, lastFullDelta) => {
  const finishMetadata = {
  ...startMetadata,
+ eventType: "finished",
+ finishTimestamp: new Date(),
  durationInMs: durationMeasurement.durationInMs,
  };
- eventSource.notifyRunFunctionFinished({
- type: "text-streaming-finished",
- status: "success",
- metadata: finishMetadata,
- settings,
- prompt,
- response: lastFullDelta,
- generatedText: fullText,
+ eventSource.notify({
+ ...finishMetadata,
+ result: {
+ status: "success",
+ response: lastFullDelta,
+ output: fullText,
+ },
  });
  },
  onError: (error) => {
  const finishMetadata = {
  ...startMetadata,
+ eventType: "finished",
+ finishTimestamp: new Date(),
  durationInMs: durationMeasurement.durationInMs,
  };
- eventSource.notifyRunFunctionFinished(error instanceof AbortError_js_1.AbortError
+ eventSource.notify(error instanceof AbortError_js_1.AbortError
  ? {
- type: "text-streaming-finished",
- status: "abort",
- metadata: finishMetadata,
- settings,
- prompt,
+ ...finishMetadata,
+ result: {
+ status: "abort",
+ },
  }
  : {
- type: "text-streaming-finished",
- status: "failure",
- metadata: finishMetadata,
- settings,
- prompt,
- error,
+ ...finishMetadata,
+ result: {
+ status: "error",
+ error,
+ },
  });
  },
  }));
  if (!result.ok) {
  const finishMetadata = {
  ...startMetadata,
+ eventType: "finished",
+ finishTimestamp: new Date(),
  durationInMs: durationMeasurement.durationInMs,
  };
  if (result.isAborted) {
- eventSource.notifyRunFunctionFinished({
- type: "text-streaming-finished",
- status: "abort",
- metadata: finishMetadata,
- settings,
- prompt,
+ eventSource.notify({
+ ...finishMetadata,
+ result: {
+ status: "abort",
+ },
  });
  throw new AbortError_js_1.AbortError();
  }
- eventSource.notifyRunFunctionFinished({
- type: "text-streaming-finished",
- status: "failure",
- metadata: finishMetadata,
- settings,
- prompt,
- error: result.error,
+ eventSource.notify({
+ ...finishMetadata,
+ result: {
+ status: "error",
+ error: result.error,
+ },
  });
  throw result.error;
  }
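The notable behavioral change in this hunk is how observers are collected: streamText now merges global observers, model-setting observers, run observers, and the new per-call observers (in that order) into a single FunctionEventSource. A minimal TypeScript sketch of that resolution order, with FunctionObserver left as an opaque placeholder because its declaration is not part of this excerpt:

// Placeholder for the package's core/FunctionObserver type (not shown in this diff).
type FunctionObserver = unknown;

// Resolution order used by doStreamText in 0.22.0: global observers first,
// then model settings, then run, then per-call options.
function collectObservers(
  globalObservers: FunctionObserver[],               // getGlobalFunctionObservers()
  settingsObservers: FunctionObserver[] | undefined, // model.settings.observers
  runObservers: FunctionObserver[] | undefined,      // run?.observers
  callObservers: FunctionObserver[] | undefined      // options?.observers (new)
): FunctionObserver[] {
  return [
    ...globalObservers,
    ...(settingsObservers ?? []),
    ...(runObservers ?? []),
    ...(callObservers ?? []),
  ];
}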
package/model-function/generate-text/streamText.d.ts
@@ -1,4 +1,4 @@
- import { FunctionOptions } from "../FunctionOptions.js";
+ import { ModelFunctionOptions } from "../ModelFunctionOptions.js";
  import { CallMetadata } from "../executeCall.js";
  import { DeltaEvent } from "./DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";
@@ -7,17 +7,17 @@ export declare class StreamTextPromise<PROMPT, FULL_DELTA, SETTINGS extends Text
  private outputPromise;
  constructor(fullPromise: Promise<{
  output: AsyncIterable<string>;
- metadata: Omit<CallMetadata<TextGenerationModel<PROMPT, unknown, FULL_DELTA, SETTINGS>>, "durationInMs">;
+ metadata: Omit<CallMetadata<TextGenerationModel<PROMPT, unknown, FULL_DELTA, SETTINGS>>, "durationInMs" | "finishTimestamp">;
  }>);
  asFullResponse(): Promise<{
  output: AsyncIterable<string>;
- metadata: Omit<CallMetadata<TextGenerationModel<PROMPT, unknown, FULL_DELTA, SETTINGS>>, "durationInMs">;
+ metadata: Omit<CallMetadata<TextGenerationModel<PROMPT, unknown, FULL_DELTA, SETTINGS>>, "durationInMs" | "finishTimestamp">;
  }>;
  then<TResult1 = AsyncIterable<string>, TResult2 = never>(onfulfilled?: ((value: AsyncIterable<string>) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | undefined | null): Promise<TResult1 | TResult2>;
  catch<TResult = never>(onrejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | undefined | null): Promise<AsyncIterable<string> | TResult>;
  finally(onfinally?: (() => void) | undefined | null): Promise<AsyncIterable<string>>;
  }
  export declare function streamText<PROMPT, FULL_DELTA, SETTINGS extends TextGenerationModelSettings>(model: TextGenerationModel<PROMPT, unknown, FULL_DELTA, SETTINGS> & {
- generateDeltaStreamResponse: (prompt: PROMPT, options: FunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
+ generateDeltaStreamResponse: (prompt: PROMPT, options: ModelFunctionOptions<SETTINGS>) => PromiseLike<AsyncIterable<DeltaEvent<FULL_DELTA>>>;
  extractTextDelta: (fullDelta: FULL_DELTA) => string | undefined;
- }, prompt: PROMPT, options?: FunctionOptions<SETTINGS>): StreamTextPromise<PROMPT, FULL_DELTA, SETTINGS>;
+ }, prompt: PROMPT, options?: ModelFunctionOptions<SETTINGS>): StreamTextPromise<PROMPT, FULL_DELTA, SETTINGS>;
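For context, the streamText call surface itself is unchanged apart from the options type: awaiting the returned StreamTextPromise still yields an AsyncIterable<string>. A minimal usage sketch, assuming a pre-configured model instance and that streamText and OpenAITextGenerationModel remain exported from the package root:

import { OpenAITextGenerationModel, streamText } from "modelfusion";

// Model construction omitted; any TextGenerationModel with streaming support works.
declare const model: OpenAITextGenerationModel;

async function main() {
  const textStream = await streamText(
    model,
    "Write a short story about a robot learning to love:\n\n"
  );
  // Awaiting the StreamTextPromise resolves to an AsyncIterable<string> of text deltas.
  for await (const textFragment of textStream) {
    process.stdout.write(textFragment);
  }
}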