modelfusion 0.112.0 → 0.114.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159)
  1. package/CHANGELOG.md +105 -0
  2. package/README.md +108 -212
  3. package/core/FunctionOptions.d.ts +14 -0
  4. package/core/api/AbstractApiConfiguration.cjs +16 -1
  5. package/core/api/AbstractApiConfiguration.d.ts +7 -3
  6. package/core/api/AbstractApiConfiguration.js +16 -1
  7. package/core/api/ApiConfiguration.d.ts +10 -1
  8. package/core/api/BaseUrlApiConfiguration.cjs +9 -5
  9. package/core/api/BaseUrlApiConfiguration.d.ts +7 -7
  10. package/core/api/BaseUrlApiConfiguration.js +9 -5
  11. package/core/api/CustomHeaderProvider.cjs +2 -0
  12. package/core/api/CustomHeaderProvider.d.ts +2 -0
  13. package/core/api/CustomHeaderProvider.js +1 -0
  14. package/core/api/index.cjs +1 -0
  15. package/core/api/index.d.ts +1 -0
  16. package/core/api/index.js +1 -0
  17. package/core/cache/Cache.cjs +2 -0
  18. package/core/cache/Cache.d.ts +12 -0
  19. package/core/cache/Cache.js +1 -0
  20. package/core/cache/MemoryCache.cjs +23 -0
  21. package/core/cache/MemoryCache.d.ts +15 -0
  22. package/core/cache/MemoryCache.js +19 -0
  23. package/core/cache/index.cjs +18 -0
  24. package/core/cache/index.d.ts +2 -0
  25. package/core/cache/index.js +2 -0
  26. package/core/index.cjs +1 -0
  27. package/core/index.d.ts +1 -0
  28. package/core/index.js +1 -0
  29. package/core/schema/TypeValidationError.cjs +36 -0
  30. package/core/schema/TypeValidationError.d.ts +15 -0
  31. package/core/schema/TypeValidationError.js +32 -0
  32. package/core/schema/index.cjs +2 -0
  33. package/core/schema/index.d.ts +2 -0
  34. package/core/schema/index.js +2 -0
  35. package/core/schema/parseJSON.cjs +6 -14
  36. package/core/schema/parseJSON.d.ts +3 -2
  37. package/core/schema/parseJSON.js +6 -14
  38. package/core/schema/validateTypes.cjs +65 -0
  39. package/core/schema/validateTypes.d.ts +34 -0
  40. package/core/schema/validateTypes.js +60 -0
  41. package/model-function/embed/EmbeddingModel.d.ts +2 -2
  42. package/model-function/executeStandardCall.cjs +3 -1
  43. package/model-function/executeStandardCall.d.ts +2 -2
  44. package/model-function/executeStandardCall.js +3 -1
  45. package/model-function/executeStreamCall.cjs +2 -1
  46. package/model-function/executeStreamCall.d.ts +2 -2
  47. package/model-function/executeStreamCall.js +2 -1
  48. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  49. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +2 -2
  50. package/model-function/generate-speech/SpeechGenerationModel.d.ts +3 -3
  51. package/model-function/generate-structure/StructureFromTextPromptTemplate.d.ts +13 -0
  52. package/model-function/generate-structure/generateStructure.cjs +4 -1
  53. package/model-function/generate-structure/generateStructure.js +4 -1
  54. package/model-function/generate-structure/jsonStructurePrompt.cjs +12 -0
  55. package/model-function/generate-structure/jsonStructurePrompt.d.ts +3 -3
  56. package/model-function/generate-structure/jsonStructurePrompt.js +12 -0
  57. package/model-function/generate-structure/streamStructure.cjs +4 -1
  58. package/model-function/generate-structure/streamStructure.js +4 -1
  59. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +3 -0
  60. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +11 -2
  61. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +3 -0
  62. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -2
  63. package/model-function/generate-text/TextGenerationModel.d.ts +16 -3
  64. package/model-function/generate-text/generateText.cjs +43 -1
  65. package/model-function/generate-text/generateText.js +43 -1
  66. package/model-function/generate-transcription/TranscriptionModel.d.ts +2 -2
  67. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +20 -8
  68. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +27 -5
  69. package/model-provider/anthropic/AnthropicTextGenerationModel.js +20 -8
  70. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +8 -3
  71. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +3 -3
  72. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +8 -3
  73. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +8 -3
  74. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
  75. package/model-provider/cohere/CohereTextEmbeddingModel.js +8 -3
  76. package/model-provider/cohere/CohereTextGenerationModel.cjs +20 -8
  77. package/model-provider/cohere/CohereTextGenerationModel.d.ts +45 -5
  78. package/model-provider/cohere/CohereTextGenerationModel.js +20 -8
  79. package/model-provider/cohere/CohereTokenizer.cjs +16 -6
  80. package/model-provider/cohere/CohereTokenizer.d.ts +3 -3
  81. package/model-provider/cohere/CohereTokenizer.js +16 -6
  82. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.cjs +1 -1
  83. package/model-provider/elevenlabs/ElevenLabsApiConfiguration.js +1 -1
  84. package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +8 -3
  85. package/model-provider/elevenlabs/ElevenLabsSpeechModel.d.ts +2 -2
  86. package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +8 -3
  87. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +8 -3
  88. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +3 -3
  89. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +8 -3
  90. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +18 -4
  91. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +21 -3
  92. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +18 -4
  93. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +20 -8
  94. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +125 -5
  95. package/model-provider/llamacpp/LlamaCppCompletionModel.js +20 -8
  96. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +8 -3
  97. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -3
  98. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +8 -3
  99. package/model-provider/llamacpp/LlamaCppTokenizer.cjs +8 -3
  100. package/model-provider/llamacpp/LlamaCppTokenizer.d.ts +2 -2
  101. package/model-provider/llamacpp/LlamaCppTokenizer.js +8 -3
  102. package/model-provider/lmnt/LmntSpeechModel.cjs +8 -3
  103. package/model-provider/lmnt/LmntSpeechModel.d.ts +2 -2
  104. package/model-provider/lmnt/LmntSpeechModel.js +8 -3
  105. package/model-provider/mistral/MistralChatModel.cjs +20 -8
  106. package/model-provider/mistral/MistralChatModel.d.ts +55 -5
  107. package/model-provider/mistral/MistralChatModel.js +20 -8
  108. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +8 -3
  109. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +3 -3
  110. package/model-provider/mistral/MistralTextEmbeddingModel.js +8 -3
  111. package/model-provider/ollama/OllamaChatModel.cjs +35 -8
  112. package/model-provider/ollama/OllamaChatModel.d.ts +31 -5
  113. package/model-provider/ollama/OllamaChatModel.js +35 -8
  114. package/model-provider/ollama/OllamaCompletionModel.cjs +20 -7
  115. package/model-provider/ollama/OllamaCompletionModel.d.ts +43 -5
  116. package/model-provider/ollama/OllamaCompletionModel.js +20 -7
  117. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +8 -3
  118. package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +3 -3
  119. package/model-provider/ollama/OllamaTextEmbeddingModel.js +8 -3
  120. package/model-provider/openai/AbstractOpenAIChatModel.cjs +23 -13
  121. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +94 -7
  122. package/model-provider/openai/AbstractOpenAIChatModel.js +23 -13
  123. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +21 -9
  124. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +35 -5
  125. package/model-provider/openai/AbstractOpenAICompletionModel.js +21 -9
  126. package/model-provider/openai/AzureOpenAIApiConfiguration.cjs +5 -2
  127. package/model-provider/openai/AzureOpenAIApiConfiguration.d.ts +2 -1
  128. package/model-provider/openai/AzureOpenAIApiConfiguration.js +5 -2
  129. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.cjs +12 -6
  130. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +89 -5
  131. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.js +12 -6
  132. package/model-provider/openai/OpenAIChatModel.cjs +12 -4
  133. package/model-provider/openai/OpenAIChatModel.d.ts +3 -2
  134. package/model-provider/openai/OpenAIChatModel.js +12 -4
  135. package/model-provider/openai/OpenAIImageGenerationModel.cjs +10 -6
  136. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +4 -4
  137. package/model-provider/openai/OpenAIImageGenerationModel.js +10 -6
  138. package/model-provider/openai/OpenAISpeechModel.cjs +9 -4
  139. package/model-provider/openai/OpenAISpeechModel.d.ts +3 -3
  140. package/model-provider/openai/OpenAISpeechModel.js +9 -4
  141. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +11 -6
  142. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +3 -3
  143. package/model-provider/openai/OpenAITextEmbeddingModel.js +11 -6
  144. package/model-provider/openai/OpenAITranscriptionModel.cjs +9 -6
  145. package/model-provider/openai/OpenAITranscriptionModel.d.ts +4 -4
  146. package/model-provider/openai/OpenAITranscriptionModel.js +9 -6
  147. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +12 -4
  148. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +3 -2
  149. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +12 -4
  150. package/model-provider/stability/StabilityImageGenerationModel.cjs +10 -5
  151. package/model-provider/stability/StabilityImageGenerationModel.d.ts +3 -3
  152. package/model-provider/stability/StabilityImageGenerationModel.js +10 -5
  153. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +9 -7
  154. package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +3 -3
  155. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +9 -7
  156. package/observability/helicone/HeliconeOpenAIApiConfiguration.cjs +2 -1
  157. package/observability/helicone/HeliconeOpenAIApiConfiguration.d.ts +3 -1
  158. package/observability/helicone/HeliconeOpenAIApiConfiguration.js +2 -1
  159. package/package.json +2 -2
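Two changes recur throughout the hunks below: the provider callAPI methods now receive a FunctionCallOptions argument separately from the model-specific options, and ApiConfiguration.headers is no longer a plain record but a function that is given call context (functionType, functionId, run, callId). Together with the new CustomHeaderProvider files, this points at per-call custom headers. A minimal sketch of what that could look like for the Helicone configuration; the customCallHeaders option name and the root export are assumptions, not confirmed by these hunks:

import { HeliconeOpenAIApiConfiguration } from "modelfusion";

// Assumed option name: customCallHeaders (the new CustomHeaderProvider type).
// It would receive the same call context that api.headers({ ... }) is passed
// in the provider hunks below: functionType, functionId, run, callId.
const api = new HeliconeOpenAIApiConfiguration({
  customCallHeaders: ({ functionId, callId }) => ({
    "Helicone-Property-FunctionId": functionId ?? "unknown",
    "Helicone-Property-CallId": callId,
  }),
});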
@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
@@ -46,11 +46,11 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
  readonly contextWindowSize: number;
  readonly tokenizer: undefined;
  readonly countPromptTokens: undefined;
- callAPI<RESPONSE>(prompt: string, options: {
+ callAPI<RESPONSE>(prompt: string, callOptions: FunctionCallOptions, options: {
  responseFormat: AnthropicTextGenerationResponseFormatType<RESPONSE>;
- } & FunctionOptions): Promise<RESPONSE>;
+ }): Promise<RESPONSE>;
  get settingsForEvent(): Partial<AnthropicTextGenerationModelSettings>;
- doGenerateTexts(prompt: string, options?: FunctionOptions): Promise<{
+ doGenerateTexts(prompt: string, options: FunctionCallOptions): Promise<{
  response: {
  model: string;
  completion: string;
@@ -61,8 +61,30 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
  finishReason: TextGenerationFinishReason;
  }[];
  }>;
+ restoreGeneratedTexts(rawResponse: unknown): {
+ response: {
+ model: string;
+ completion: string;
+ stop_reason: string;
+ };
+ textGenerationResults: {
+ text: string;
+ finishReason: TextGenerationFinishReason;
+ }[];
+ };
+ processTextGenerationResponse(response: AnthropicTextGenerationResponse): {
+ response: {
+ model: string;
+ completion: string;
+ stop_reason: string;
+ };
+ textGenerationResults: {
+ text: string;
+ finishReason: TextGenerationFinishReason;
+ }[];
+ };
  private translateFinishReason;
- doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<{
+ doStreamText(prompt: string, options: FunctionCallOptions): Promise<AsyncIterable<Delta<{
  model: string;
  completion: string;
  stop_reason: string | null;
@@ -3,6 +3,7 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { parseJSON } from "../../core/schema/parseJSON.js";
+ import { validateTypes } from "../../core/schema/validateTypes.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
@@ -66,17 +67,22 @@ export class AnthropicTextGenerationModel extends AbstractModel {
  get modelName() {
  return this.settings.model;
  }
- async callAPI(prompt, options) {
+ async callAPI(prompt, callOptions, options) {
  const api = this.settings.api ?? new AnthropicApiConfiguration();
  const responseFormat = options.responseFormat;
- const abortSignal = options.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  const userId = this.settings.userId;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl(`/complete`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  model: this.settings.model,
  prompt,
@@ -105,10 +111,17 @@ export class AnthropicTextGenerationModel extends AbstractModel {
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
  }
  async doGenerateTexts(prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
  responseFormat: AnthropicTextGenerationResponseFormat.json,
- });
+ }));
+ }
+ restoreGeneratedTexts(rawResponse) {
+ return this.processTextGenerationResponse(validateTypes({
+ structure: rawResponse,
+ schema: zodSchema(anthropicTextGenerationResponseSchema),
+ }));
+ }
+ processTextGenerationResponse(response) {
  return {
  response,
  textGenerationResults: [
@@ -130,8 +143,7 @@ export class AnthropicTextGenerationModel extends AbstractModel {
  }
  }
  doStreamText(prompt, options) {
- return this.callAPI(prompt, {
- ...options,
+ return this.callAPI(prompt, options, {
  responseFormat: AnthropicTextGenerationResponseFormat.deltaIterable,
  });
  }
@@ -28,15 +28,20 @@ class Automatic1111ImageGenerationModel extends AbstractModel_js_1.AbstractModel
  get modelName() {
  return this.settings.model;
  }
- async callAPI(input, options) {
+ async callAPI(input, callOptions) {
  const api = this.settings.api ?? new Automatic1111ApiConfiguration_js_1.Automatic1111ApiConfiguration();
- const abortSignal = options?.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => (0, postToApi_js_1.postJsonToApi)({
  url: api.assembleUrl(`/txt2img`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  prompt: input.prompt,
  negative_prompt: input.negativePrompt,
@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { PromptTemplate } from "../../model-function/PromptTemplate.js";
@@ -37,9 +37,9 @@ export declare class Automatic1111ImageGenerationModel extends AbstractModel<Aut
  constructor(settings: Automatic1111ImageGenerationSettings);
  readonly provider: "Automatic1111";
  get modelName(): string;
- callAPI(input: Automatic1111ImageGenerationPrompt, options?: FunctionOptions): Promise<Automatic1111ImageGenerationResponse>;
+ callAPI(input: Automatic1111ImageGenerationPrompt, callOptions: FunctionCallOptions): Promise<Automatic1111ImageGenerationResponse>;
  get settingsForEvent(): Partial<Automatic1111ImageGenerationSettings>;
- doGenerateImages(prompt: Automatic1111ImageGenerationPrompt, options?: FunctionOptions): Promise<{
+ doGenerateImages(prompt: Automatic1111ImageGenerationPrompt, options: FunctionCallOptions): Promise<{
  response: {
  images: string[];
  parameters: {};
@@ -25,15 +25,20 @@ export class Automatic1111ImageGenerationModel extends AbstractModel {
  get modelName() {
  return this.settings.model;
  }
- async callAPI(input, options) {
+ async callAPI(input, callOptions) {
  const api = this.settings.api ?? new Automatic1111ApiConfiguration();
- const abortSignal = options?.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl(`/txt2img`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  prompt: input.prompt,
  negative_prompt: input.negativePrompt,
@@ -113,18 +113,23 @@ class CohereTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
  async detokenize(tokens) {
  return this.tokenizer.detokenize(tokens);
  }
- async callAPI(texts, options) {
+ async callAPI(texts, callOptions) {
  if (texts.length > this.maxValuesPerCall) {
  throw new Error(`The Cohere embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
  }
  const api = this.settings.api ?? new CohereApiConfiguration_js_1.CohereApiConfiguration();
- const abortSignal = options?.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => (0, postToApi_js_1.postJsonToApi)({
  url: api.assembleUrl(`/embed`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  model: this.settings.model,
  texts,
@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
@@ -70,9 +70,9 @@ export declare class CohereTextEmbeddingModel extends AbstractModel<CohereTextEm
  tokenTexts: string[];
  }>;
  detokenize(tokens: number[]): Promise<string>;
- callAPI(texts: Array<string>, options?: FunctionOptions): Promise<CohereTextEmbeddingResponse>;
+ callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<CohereTextEmbeddingResponse>;
  get settingsForEvent(): Partial<CohereTextEmbeddingModelSettings>;
- doEmbedValues(texts: string[], options?: FunctionOptions): Promise<{
+ doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
  response: {
  embeddings: number[][];
  texts: string[];
@@ -110,18 +110,23 @@ export class CohereTextEmbeddingModel extends AbstractModel {
  async detokenize(tokens) {
  return this.tokenizer.detokenize(tokens);
  }
- async callAPI(texts, options) {
+ async callAPI(texts, callOptions) {
  if (texts.length > this.maxValuesPerCall) {
  throw new Error(`The Cohere embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
  }
  const api = this.settings.api ?? new CohereApiConfiguration();
- const abortSignal = options?.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl(`/embed`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  model: this.settings.model,
  texts,
@@ -5,6 +5,7 @@ const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
+ const validateTypes_js_1 = require("../../core/schema/validateTypes.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
  const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
  const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
@@ -73,16 +74,21 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
  async countPromptTokens(input) {
  return (0, countTokens_js_1.countTokens)(this.tokenizer, input);
  }
- async callAPI(prompt, options) {
+ async callAPI(prompt, callOptions, options) {
  const api = this.settings.api ?? new CohereApiConfiguration_js_1.CohereApiConfiguration();
  const responseFormat = options.responseFormat;
- const abortSignal = options.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => (0, postToApi_js_1.postJsonToApi)({
  url: api.assembleUrl(`/generate`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  stream: responseFormat.stream,
  model: this.settings.model,
@@ -122,10 +128,17 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
  }
  async doGenerateTexts(prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
  responseFormat: exports.CohereTextGenerationResponseFormat.json,
- });
+ }));
+ }
+ restoreGeneratedTexts(rawResponse) {
+ return this.processTextGenerationResponse((0, validateTypes_js_1.validateTypes)({
+ structure: rawResponse,
+ schema: (0, ZodSchema_js_1.zodSchema)(cohereTextGenerationResponseSchema),
+ }));
+ }
+ processTextGenerationResponse(response) {
  return {
  response,
  textGenerationResults: response.generations.map((generation) => ({
@@ -149,8 +162,7 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
  }
  }
  doStreamText(prompt, options) {
- return this.callAPI(prompt, {
- ...options,
+ return this.callAPI(prompt, options, {
  responseFormat: exports.CohereTextGenerationResponseFormat.deltaIterable,
  });
  }
@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
@@ -54,11 +54,11 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
  readonly contextWindowSize: number;
  readonly tokenizer: CohereTokenizer;
  countPromptTokens(input: string): Promise<number>;
- callAPI<RESPONSE>(prompt: string, options: {
+ callAPI<RESPONSE>(prompt: string, callOptions: FunctionCallOptions, options: {
  responseFormat: CohereTextGenerationResponseFormatType<RESPONSE>;
- } & FunctionOptions): Promise<RESPONSE>;
+ }): Promise<RESPONSE>;
  get settingsForEvent(): Partial<CohereTextGenerationModelSettings>;
- doGenerateTexts(prompt: string, options?: FunctionOptions): Promise<{
+ doGenerateTexts(prompt: string, options: FunctionCallOptions): Promise<{
  response: {
  id: string;
  prompt: string;
@@ -78,8 +78,48 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
  finishReason: TextGenerationFinishReason;
  }[];
  }>;
+ restoreGeneratedTexts(rawResponse: unknown): {
+ response: {
+ id: string;
+ prompt: string;
+ generations: {
+ text: string;
+ id: string;
+ finish_reason?: string | undefined;
+ }[];
+ meta?: {
+ api_version: {
+ version: string;
+ };
+ } | undefined;
+ };
+ textGenerationResults: {
+ text: string;
+ finishReason: TextGenerationFinishReason;
+ }[];
+ };
+ processTextGenerationResponse(response: CohereTextGenerationResponse): {
+ response: {
+ id: string;
+ prompt: string;
+ generations: {
+ text: string;
+ id: string;
+ finish_reason?: string | undefined;
+ }[];
+ meta?: {
+ api_version: {
+ version: string;
+ };
+ } | undefined;
+ };
+ textGenerationResults: {
+ text: string;
+ finishReason: TextGenerationFinishReason;
+ }[];
+ };
  private translateFinishReason;
- doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+ doStreamText(prompt: string, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
  text: string;
  is_finished: false;
  } | {
@@ -2,6 +2,7 @@ import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { zodSchema } from "../../core/schema/ZodSchema.js";
+ import { validateTypes } from "../../core/schema/validateTypes.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
@@ -70,16 +71,21 @@ export class CohereTextGenerationModel extends AbstractModel {
  async countPromptTokens(input) {
  return countTokens(this.tokenizer, input);
  }
- async callAPI(prompt, options) {
+ async callAPI(prompt, callOptions, options) {
  const api = this.settings.api ?? new CohereApiConfiguration();
  const responseFormat = options.responseFormat;
- const abortSignal = options.run?.abortSignal;
+ const abortSignal = callOptions.run?.abortSignal;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl(`/generate`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  stream: responseFormat.stream,
  model: this.settings.model,
@@ -119,10 +125,17 @@ export class CohereTextGenerationModel extends AbstractModel {
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
  }
  async doGenerateTexts(prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
+ return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
  responseFormat: CohereTextGenerationResponseFormat.json,
- });
+ }));
+ }
+ restoreGeneratedTexts(rawResponse) {
+ return this.processTextGenerationResponse(validateTypes({
+ structure: rawResponse,
+ schema: zodSchema(cohereTextGenerationResponseSchema),
+ }));
+ }
+ processTextGenerationResponse(response) {
  return {
  response,
  textGenerationResults: response.generations.map((generation) => ({
@@ -146,8 +159,7 @@ export class CohereTextGenerationModel extends AbstractModel {
  }
  }
  doStreamText(prompt, options) {
- return this.callAPI(prompt, {
- ...options,
+ return this.callAPI(prompt, options, {
  responseFormat: CohereTextGenerationResponseFormat.deltaIterable,
  });
  }
@@ -33,15 +33,20 @@ class CohereTokenizer {
  });
  this.settings = settings;
  }
- async callTokenizeAPI(text, context) {
+ async callTokenizeAPI(text, callOptions) {
  const api = this.settings.api ?? new CohereApiConfiguration_js_1.CohereApiConfiguration();
- const abortSignal = context?.abortSignal;
+ const abortSignal = callOptions?.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => (0, postToApi_js_1.postJsonToApi)({
  url: api.assembleUrl(`/tokenize`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: "tokenize",
+ functionId: callOptions?.functionId,
+ run: callOptions?.run,
+ callId: "",
+ }),
  body: {
  model: this.settings.model,
  text,
@@ -52,15 +57,20 @@ class CohereTokenizer {
  }),
  });
  }
- async callDeTokenizeAPI(tokens, context) {
+ async callDeTokenizeAPI(tokens, callOptions) {
  const api = this.settings.api ?? new CohereApiConfiguration_js_1.CohereApiConfiguration();
- const abortSignal = context?.abortSignal;
+ const abortSignal = callOptions?.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => (0, postToApi_js_1.postJsonToApi)({
  url: api.assembleUrl(`/detokenize`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: "detokenize",
+ functionId: callOptions?.functionId,
+ run: callOptions?.run,
+ callId: "",
+ }),
  body: {
  model: this.settings.model,
  tokens,
@@ -1,5 +1,5 @@
  import { z } from "zod";
- import { Run } from "../../core/Run.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { FullTokenizer } from "../../model-function/tokenize-text/Tokenizer.js";
  import { CohereTextEmbeddingModelType } from "./CohereTextEmbeddingModel.js";
@@ -28,8 +28,8 @@ export interface CohereTokenizerSettings {
  export declare class CohereTokenizer implements FullTokenizer {
  readonly settings: CohereTokenizerSettings;
  constructor(settings: CohereTokenizerSettings);
- callTokenizeAPI(text: string, context?: Run): Promise<CohereTokenizationResponse>;
- callDeTokenizeAPI(tokens: number[], context?: Run): Promise<CohereDetokenizationResponse>;
+ callTokenizeAPI(text: string, callOptions?: FunctionCallOptions): Promise<CohereTokenizationResponse>;
+ callDeTokenizeAPI(tokens: number[], callOptions?: FunctionCallOptions): Promise<CohereDetokenizationResponse>;
  tokenize(text: string): Promise<number[]>;
  tokenizeWithTexts(text: string): Promise<{
  tokens: number[];
@@ -30,15 +30,20 @@ export class CohereTokenizer {
  });
  this.settings = settings;
  }
- async callTokenizeAPI(text, context) {
+ async callTokenizeAPI(text, callOptions) {
  const api = this.settings.api ?? new CohereApiConfiguration();
- const abortSignal = context?.abortSignal;
+ const abortSignal = callOptions?.run?.abortSignal;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl(`/tokenize`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: "tokenize",
+ functionId: callOptions?.functionId,
+ run: callOptions?.run,
+ callId: "",
+ }),
  body: {
  model: this.settings.model,
  text,
@@ -49,15 +54,20 @@ export class CohereTokenizer {
  }),
  });
  }
- async callDeTokenizeAPI(tokens, context) {
+ async callDeTokenizeAPI(tokens, callOptions) {
  const api = this.settings.api ?? new CohereApiConfiguration();
- const abortSignal = context?.abortSignal;
+ const abortSignal = callOptions?.run?.abortSignal;
  return callWithRetryAndThrottle({
  retry: api.retry,
  throttle: api.throttle,
  call: async () => postJsonToApi({
  url: api.assembleUrl(`/detokenize`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: "detokenize",
+ functionId: callOptions?.functionId,
+ run: callOptions?.run,
+ callId: "",
+ }),
  body: {
  model: this.settings.model,
  tokens,
@@ -27,7 +27,7 @@ class ElevenLabsApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApi
  });
  }
  get apiKey() {
- return this.headers["xi-api-key"];
+ return this.fixedHeadersValue["xi-api-key"];
  }
  }
  exports.ElevenLabsApiConfiguration = ElevenLabsApiConfiguration;
@@ -24,6 +24,6 @@ export class ElevenLabsApiConfiguration extends BaseUrlApiConfigurationWithDefau
  });
  }
  get apiKey() {
- return this.headers["xi-api-key"];
+ return this.fixedHeadersValue["xi-api-key"];
  }
  }
@@ -38,9 +38,9 @@ class ElevenLabsSpeechModel extends AbstractModel_js_1.AbstractModel {
  get modelName() {
  return this.settings.voice;
  }
- async callAPI(text, options) {
+ async callAPI(text, callOptions) {
  const api = this.settings.api ?? new ElevenLabsApiConfiguration_js_1.ElevenLabsApiConfiguration();
- const abortSignal = options?.run?.abortSignal;
+ const abortSignal = callOptions?.run?.abortSignal;
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
  retry: api.retry,
  throttle: api.throttle,
@@ -49,7 +49,12 @@ class ElevenLabsSpeechModel extends AbstractModel_js_1.AbstractModel {
  optimize_streaming_latency: this.settings.optimizeStreamingLatency,
  output_format: this.settings.outputFormat,
  })}`),
- headers: api.headers,
+ headers: api.headers({
+ functionType: callOptions.functionType,
+ functionId: callOptions.functionId,
+ run: callOptions.run,
+ callId: callOptions.callId,
+ }),
  body: {
  text,
  model_id: this.settings.model ?? defaultModel,
@@ -1,5 +1,5 @@
  /// <reference types="node" />
- import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { Delta } from "../../model-function/Delta.js";
@@ -37,7 +37,7 @@ export declare class ElevenLabsSpeechModel extends AbstractModel<ElevenLabsSpeec
  get modelName(): string;
  private callAPI;
  get settingsForEvent(): Partial<ElevenLabsSpeechModelSettings>;
- doGenerateSpeechStandard(text: string, options?: FunctionOptions): Promise<Buffer>;
+ doGenerateSpeechStandard(text: string, options: FunctionCallOptions): Promise<Buffer>;
  doGenerateSpeechStreamDuplex(textStream: AsyncIterable<string>): Promise<AsyncIterable<Delta<Buffer>>>;
  withSettings(additionalSettings: Partial<ElevenLabsSpeechModelSettings>): this;
  }