modelfusion 0.116.1 → 0.118.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. package/CHANGELOG.md +67 -0
  2. package/README.md +14 -11
  3. package/core/getFunctionCallLogger.cjs +6 -6
  4. package/core/getFunctionCallLogger.js +6 -6
  5. package/model-function/ModelCallEvent.d.ts +1 -1
  6. package/model-function/embed/EmbeddingEvent.d.ts +1 -1
  7. package/model-function/embed/EmbeddingModel.d.ts +1 -1
  8. package/model-function/embed/embed.cjs +5 -5
  9. package/model-function/embed/embed.d.ts +2 -2
  10. package/model-function/embed/embed.js +5 -5
  11. package/model-function/executeStandardCall.cjs +3 -3
  12. package/model-function/executeStandardCall.d.ts +2 -2
  13. package/model-function/executeStandardCall.js +3 -3
  14. package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
  15. package/model-function/generate-image/ImageGenerationModel.d.ts +1 -1
  16. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +1 -1
  17. package/model-function/generate-image/generateImage.cjs +2 -2
  18. package/model-function/generate-image/generateImage.d.ts +1 -1
  19. package/model-function/generate-image/generateImage.js +2 -2
  20. package/model-function/generate-speech/SpeechGenerationEvent.d.ts +1 -1
  21. package/model-function/generate-speech/generateSpeech.cjs +2 -2
  22. package/model-function/generate-speech/generateSpeech.d.ts +1 -1
  23. package/model-function/generate-speech/generateSpeech.js +2 -2
  24. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +1 -1
  25. package/model-function/generate-structure/StructureFromTextGenerationModel.js +1 -1
  26. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +1 -1
  27. package/model-function/generate-structure/StructureFromTextStreamingModel.js +1 -1
  28. package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
  29. package/model-function/generate-structure/generateStructure.cjs +2 -2
  30. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  31. package/model-function/generate-structure/generateStructure.js +2 -2
  32. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +2 -2
  33. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -2
  34. package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
  35. package/model-function/generate-text/TextGenerationModel.d.ts +2 -2
  36. package/model-function/generate-text/generateText.cjs +3 -3
  37. package/model-function/generate-text/generateText.d.ts +1 -1
  38. package/model-function/generate-text/generateText.js +3 -3
  39. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +8 -1
  40. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +5 -0
  41. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +6 -0
  42. package/model-function/generate-text/prompt-template/PromptTemplateProvider.cjs +2 -0
  43. package/model-function/generate-text/prompt-template/PromptTemplateProvider.d.ts +8 -0
  44. package/model-function/generate-text/prompt-template/PromptTemplateProvider.js +1 -0
  45. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +34 -1
  46. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +9 -0
  47. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +31 -0
  48. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +28 -0
  49. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +29 -1
  50. package/model-function/generate-text/prompt-template/index.cjs +1 -0
  51. package/model-function/generate-text/prompt-template/index.d.ts +1 -0
  52. package/model-function/generate-text/prompt-template/index.js +1 -0
  53. package/model-function/generate-transcription/TranscriptionEvent.d.ts +1 -1
  54. package/model-function/generate-transcription/TranscriptionModel.d.ts +1 -1
  55. package/model-function/generate-transcription/generateTranscription.cjs +1 -1
  56. package/model-function/generate-transcription/generateTranscription.d.ts +1 -1
  57. package/model-function/generate-transcription/generateTranscription.js +1 -1
  58. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +3 -3
  59. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
  60. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +3 -3
  61. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +3 -3
  62. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -1
  63. package/model-provider/cohere/CohereTextEmbeddingModel.js +3 -3
  64. package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -3
  65. package/model-provider/cohere/CohereTextGenerationModel.d.ts +4 -4
  66. package/model-provider/cohere/CohereTextGenerationModel.js +3 -3
  67. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +3 -3
  68. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -1
  69. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +3 -3
  70. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +3 -3
  71. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +4 -4
  72. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +3 -3
  73. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +15 -1
  74. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -0
  75. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +13 -0
  76. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +37 -27
  77. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +18 -8
  78. package/model-provider/llamacpp/LlamaCppCompletionModel.js +37 -27
  79. package/model-provider/llamacpp/LlamaCppFacade.cjs +31 -3
  80. package/model-provider/llamacpp/LlamaCppFacade.d.ts +6 -1
  81. package/model-provider/llamacpp/LlamaCppFacade.js +6 -1
  82. package/model-provider/llamacpp/LlamaCppGrammars.cjs +84 -0
  83. package/model-provider/llamacpp/LlamaCppGrammars.d.ts +18 -0
  84. package/model-provider/llamacpp/LlamaCppGrammars.js +81 -0
  85. package/model-provider/llamacpp/LlamaCppPrompt.cjs +59 -0
  86. package/model-provider/llamacpp/LlamaCppPrompt.d.ts +14 -0
  87. package/model-provider/llamacpp/LlamaCppPrompt.js +31 -0
  88. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +3 -3
  89. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +1 -1
  90. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +3 -3
  91. package/model-provider/llamacpp/index.cjs +2 -3
  92. package/model-provider/llamacpp/index.d.ts +1 -2
  93. package/model-provider/llamacpp/index.js +1 -2
  94. package/model-provider/mistral/MistralChatModel.cjs +3 -3
  95. package/model-provider/mistral/MistralChatModel.d.ts +4 -4
  96. package/model-provider/mistral/MistralChatModel.js +3 -3
  97. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +3 -3
  98. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +1 -1
  99. package/model-provider/mistral/MistralTextEmbeddingModel.js +3 -3
  100. package/model-provider/ollama/OllamaChatModel.cjs +3 -3
  101. package/model-provider/ollama/OllamaChatModel.d.ts +2 -2
  102. package/model-provider/ollama/OllamaChatModel.js +3 -3
  103. package/model-provider/ollama/OllamaCompletionModel.cjs +3 -3
  104. package/model-provider/ollama/OllamaCompletionModel.d.ts +14 -14
  105. package/model-provider/ollama/OllamaCompletionModel.js +3 -3
  106. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +3 -3
  107. package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +1 -1
  108. package/model-provider/ollama/OllamaTextEmbeddingModel.js +3 -3
  109. package/model-provider/openai/AbstractOpenAIChatModel.cjs +12 -12
  110. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +6 -6
  111. package/model-provider/openai/AbstractOpenAIChatModel.js +12 -12
  112. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +6 -6
  113. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +2 -2
  114. package/model-provider/openai/AbstractOpenAICompletionModel.js +6 -6
  115. package/model-provider/openai/OpenAIImageGenerationModel.cjs +3 -3
  116. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
  117. package/model-provider/openai/OpenAIImageGenerationModel.js +3 -3
  118. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -3
  119. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -1
  120. package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -3
  121. package/model-provider/openai/OpenAITranscriptionModel.cjs +3 -3
  122. package/model-provider/openai/OpenAITranscriptionModel.d.ts +1 -1
  123. package/model-provider/openai/OpenAITranscriptionModel.js +3 -3
  124. package/model-provider/stability/StabilityImageGenerationModel.cjs +3 -3
  125. package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
  126. package/model-provider/stability/StabilityImageGenerationModel.js +3 -3
  127. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +3 -3
  128. package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +1 -1
  129. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +3 -3
  130. package/package.json +1 -1
  131. package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +2 -2
  132. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
  133. package/tool/generate-tool-call/TextGenerationToolCallModel.js +2 -2
  134. package/tool/generate-tool-call/ToolCallGenerationEvent.d.ts +1 -1
  135. package/tool/generate-tool-call/ToolCallGenerationModel.d.ts +1 -1
  136. package/tool/generate-tool-call/generateToolCall.cjs +2 -2
  137. package/tool/generate-tool-call/generateToolCall.js +2 -2
  138. package/tool/generate-tool-calls/TextGenerationToolCallsModel.cjs +2 -2
  139. package/tool/generate-tool-calls/TextGenerationToolCallsModel.d.ts +1 -1
  140. package/tool/generate-tool-calls/TextGenerationToolCallsModel.js +2 -2
  141. package/tool/generate-tool-calls/ToolCallsGenerationEvent.d.ts +1 -1
  142. package/tool/generate-tool-calls/ToolCallsGenerationModel.d.ts +1 -1
  143. package/tool/generate-tool-calls/generateToolCalls.cjs +2 -2
  144. package/tool/generate-tool-calls/generateToolCalls.d.ts +1 -1
  145. package/tool/generate-tool-calls/generateToolCalls.js +2 -2
package/model-provider/llamacpp/LlamaCppPrompt.cjs (new file)
@@ -0,0 +1,59 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.BakLLaVA1 = exports.Vicuna = exports.Alpaca = exports.NeuralChat = exports.Llama2 = exports.ChatML = exports.Mistral = exports.Text = exports.asLlamaCppTextPromptTemplateProvider = exports.asLlamaCppPromptTemplate = void 0;
+ const alpacaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs"));
+ const chatMlPrompt = __importStar(require("../../model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs"));
+ const llama2Prompt = __importStar(require("../../model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs"));
+ const mistralPrompt = __importStar(require("../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs"));
+ const neuralChatPrompt = __importStar(require("../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs"));
+ const textPrompt = __importStar(require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs"));
+ const vicunaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs"));
+ const LlamaCppBakLLaVA1Prompt = __importStar(require("./LlamaCppBakLLaVA1PromptTemplate.cjs"));
+ function asLlamaCppPromptTemplate(promptTemplate) {
+ return {
+ format: (prompt) => ({
+ text: promptTemplate.format(prompt),
+ }),
+ stopSequences: promptTemplate.stopSequences,
+ };
+ }
+ exports.asLlamaCppPromptTemplate = asLlamaCppPromptTemplate;
+ function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider) {
+ return {
+ text: () => asLlamaCppPromptTemplate(promptTemplateProvider.text()),
+ instruction: () => asLlamaCppPromptTemplate(promptTemplateProvider.instruction()),
+ chat: () => asLlamaCppPromptTemplate(promptTemplateProvider.chat()),
+ };
+ }
+ exports.asLlamaCppTextPromptTemplateProvider = asLlamaCppTextPromptTemplateProvider;
+ exports.Text = asLlamaCppTextPromptTemplateProvider(textPrompt);
+ exports.Mistral = asLlamaCppTextPromptTemplateProvider(mistralPrompt);
+ exports.ChatML = asLlamaCppTextPromptTemplateProvider(chatMlPrompt);
+ exports.Llama2 = asLlamaCppTextPromptTemplateProvider(llama2Prompt);
+ exports.NeuralChat = asLlamaCppTextPromptTemplateProvider(neuralChatPrompt);
+ exports.Alpaca = asLlamaCppTextPromptTemplateProvider(alpacaPrompt);
+ exports.Vicuna = asLlamaCppTextPromptTemplateProvider(vicunaPrompt);
+ exports.BakLLaVA1 = LlamaCppBakLLaVA1Prompt;
package/model-provider/llamacpp/LlamaCppPrompt.d.ts (new file)
@@ -0,0 +1,14 @@
+ import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+ import { TextGenerationPromptTemplateProvider } from "../../model-function/generate-text/prompt-template/PromptTemplateProvider.js";
+ import * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
+ import { LlamaCppCompletionPrompt } from "./LlamaCppCompletionModel.js";
+ export declare function asLlamaCppPromptTemplate<SOURCE_PROMPT>(promptTemplate: TextGenerationPromptTemplate<SOURCE_PROMPT, string>): TextGenerationPromptTemplate<SOURCE_PROMPT, LlamaCppCompletionPrompt>;
+ export declare function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider: TextGenerationPromptTemplateProvider<string>): TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const Text: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const Mistral: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const ChatML: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const Llama2: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const NeuralChat: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const Alpaca: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const Vicuna: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const BakLLaVA1: typeof LlamaCppBakLLaVA1Prompt;
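These declarations are the heart of the new LlamaCppPrompt module: asLlamaCppPromptTemplate lifts a plain string prompt template into the { text } object shape of LlamaCppCompletionPrompt, and asLlamaCppTextPromptTemplateProvider does the same for a whole text/instruction/chat provider. A minimal caller-side sketch; the myTemplate value is hypothetical, and reaching the helper through the llamacpp facade as llamacpp.prompt is an assumption based on the enlarged LlamaCppFacade in the file list above:

    import { llamacpp, TextGenerationPromptTemplate } from "modelfusion";

    // Hypothetical plain-text template: formats an instruction string.
    const myTemplate: TextGenerationPromptTemplate<string, string> = {
      format: (instruction) => `### Instruction:\n${instruction}\n\n### Response:\n`,
      stopSequences: ["### Instruction:"],
    };

    // Wraps the formatted string into the { text } object that
    // LlamaCppCompletionPrompt expects:
    const llamaCppTemplate = llamacpp.prompt.asLlamaCppPromptTemplate(myTemplate);
    // llamaCppTemplate.format("Say hi")
    //   --> { text: "### Instruction:\nSay hi\n\n### Response:\n" }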
package/model-provider/llamacpp/LlamaCppPrompt.js (new file)
@@ -0,0 +1,31 @@
+ import * as alpacaPrompt from "../../model-function/generate-text/prompt-template/AlpacaPromptTemplate.js";
+ import * as chatMlPrompt from "../../model-function/generate-text/prompt-template/ChatMLPromptTemplate.js";
+ import * as llama2Prompt from "../../model-function/generate-text/prompt-template/Llama2PromptTemplate.js";
+ import * as mistralPrompt from "../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js";
+ import * as neuralChatPrompt from "../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js";
+ import * as textPrompt from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
+ import * as vicunaPrompt from "../../model-function/generate-text/prompt-template/VicunaPromptTemplate.js";
+ import * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
+ export function asLlamaCppPromptTemplate(promptTemplate) {
+ return {
+ format: (prompt) => ({
+ text: promptTemplate.format(prompt),
+ }),
+ stopSequences: promptTemplate.stopSequences,
+ };
+ }
+ export function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider) {
+ return {
+ text: () => asLlamaCppPromptTemplate(promptTemplateProvider.text()),
+ instruction: () => asLlamaCppPromptTemplate(promptTemplateProvider.instruction()),
+ chat: () => asLlamaCppPromptTemplate(promptTemplateProvider.chat()),
+ };
+ }
+ export const Text = asLlamaCppTextPromptTemplateProvider(textPrompt);
+ export const Mistral = asLlamaCppTextPromptTemplateProvider(mistralPrompt);
+ export const ChatML = asLlamaCppTextPromptTemplateProvider(chatMlPrompt);
+ export const Llama2 = asLlamaCppTextPromptTemplateProvider(llama2Prompt);
+ export const NeuralChat = asLlamaCppTextPromptTemplateProvider(neuralChatPrompt);
+ export const Alpaca = asLlamaCppTextPromptTemplateProvider(alpacaPrompt);
+ export const Vicuna = asLlamaCppTextPromptTemplateProvider(vicunaPrompt);
+ export const BakLLaVA1 = LlamaCppBakLLaVA1Prompt;
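Together these three builds ship ready-made Llama.cpp template providers for the Text, Mistral, ChatML, Llama2, NeuralChat, Alpaca, Vicuna, and BakLLaVA1 formats. A hedged usage sketch, assuming the LlamaCppFacade additions above expose the module as llamacpp.prompt, that the completion model (whose .d.ts grows by 18 lines in this release) accepts a promptTemplate setting, and the positional generateText(model, prompt) call style of this release:

    import { generateText, llamacpp } from "modelfusion";

    const text = await generateText(
      llamacpp
        .CompletionTextGenerator({
          promptTemplate: llamacpp.prompt.Llama2, // provider added in this diff
          maxGenerationTokens: 512,
        })
        .withInstructionPrompt(), // route instruction prompts through the template
      { instruction: "Explain GBNF grammars in two sentences." }
    );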
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs
@@ -80,10 +80,10 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
  };
  }
  async doEmbedValues(texts, options) {
- const response = await this.callAPI(texts, options);
+ const rawResponse = await this.callAPI(texts, options);
  return {
- response,
- embeddings: [response.embedding],
+ rawResponse,
+ embeddings: [rawResponse.embedding],
  };
  }
  withSettings(additionalSettings) {
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts
@@ -21,7 +21,7 @@ export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTe
  callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<LlamaCppTextEmbeddingResponse>;
  get settingsForEvent(): Partial<LlamaCppTextEmbeddingModelSettings>;
  doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  embedding: number[];
  };
  embeddings: number[][];
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js
@@ -77,10 +77,10 @@ export class LlamaCppTextEmbeddingModel extends AbstractModel {
  };
  }
  async doEmbedValues(texts, options) {
- const response = await this.callAPI(texts, options);
+ const rawResponse = await this.callAPI(texts, options);
  return {
- response,
- embeddings: [response.embedding],
+ rawResponse,
+ embeddings: [rawResponse.embedding],
  };
  }
  withSettings(additionalSettings) {
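This response → rawResponse rename is the pattern behind most of the 145 touched files: every doGenerateTexts / doEmbedValues / tool-call result now carries the raw provider payload under rawResponse. Callers see it when they request the full result; a minimal sketch, assuming the positional embed(model, value, options) signature with a fullResponse: true option, and a llama.cpp server running locally:

    import { embed, llamacpp } from "modelfusion";

    const { embedding, rawResponse } = await embed(
      llamacpp.TextEmbedder(),
      "At first, Nox didn't know what to do with the pup.",
      { fullResponse: true } // the raw payload was `response` before 0.118.0
    );
    console.log(embedding);   // number[]
    console.log(rawResponse); // raw llama.cpp response, e.g. { embedding: [...] }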
package/model-provider/llamacpp/index.cjs
@@ -26,10 +26,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.llamacpp = exports.LlamaCppBakLLaVA1Prompt = void 0;
+ exports.llamacpp = void 0;
  __exportStar(require("./LlamaCppApiConfiguration.cjs"), exports);
- exports.LlamaCppBakLLaVA1Prompt = __importStar(require("./LlamaCppBakLLaVA1PromptTemplate.cjs"));
+ __exportStar(require("./LlamaCppCompletionModel.cjs"), exports);
  exports.llamacpp = __importStar(require("./LlamaCppFacade.cjs"));
  __exportStar(require("./LlamaCppTextEmbeddingModel.cjs"), exports);
- __exportStar(require("./LlamaCppCompletionModel.cjs"), exports);
  __exportStar(require("./LlamaCppTokenizer.cjs"), exports);
package/model-provider/llamacpp/index.d.ts
@@ -1,7 +1,6 @@
  export * from "./LlamaCppApiConfiguration.js";
- export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
+ export * from "./LlamaCppCompletionModel.js";
  export { LlamaCppErrorData } from "./LlamaCppError.js";
  export * as llamacpp from "./LlamaCppFacade.js";
  export * from "./LlamaCppTextEmbeddingModel.js";
- export * from "./LlamaCppCompletionModel.js";
  export * from "./LlamaCppTokenizer.js";
package/model-provider/llamacpp/index.js
@@ -1,6 +1,5 @@
  export * from "./LlamaCppApiConfiguration.js";
- export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
+ export * from "./LlamaCppCompletionModel.js";
  export * as llamacpp from "./LlamaCppFacade.js";
  export * from "./LlamaCppTextEmbeddingModel.js";
- export * from "./LlamaCppCompletionModel.js";
  export * from "./LlamaCppTokenizer.js";
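All three index files drop the root-level LlamaCppBakLLaVA1Prompt namespace export; the BakLLaVA1 template now lives on the new LlamaCppPrompt module (exported as BakLLaVA1 above). A migration sketch, assuming the facade exposes the module as llamacpp.prompt:

    // Before (0.116.x):
    // import { LlamaCppBakLLaVA1Prompt } from "modelfusion";
    // const template = LlamaCppBakLLaVA1Prompt.instruction();

    // After (0.118.0):
    import { llamacpp } from "modelfusion";
    const template = llamacpp.prompt.BakLLaVA1.instruction();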
package/model-provider/mistral/MistralChatModel.cjs
@@ -97,10 +97,10 @@ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
  schema: (0, ZodSchema_js_1.zodSchema)(mistralChatResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
- textGenerationResults: response.choices.map((choice) => ({
+ rawResponse,
+ textGenerationResults: rawResponse.choices.map((choice) => ({
  text: choice.message.content,
  finishReason: this.translateFinishReason(choice.finish_reason),
  })),
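For text generation the rename is caller-visible in the fullResponse result shape. A hedged sketch against the Mistral chat model changed above, assuming the positional generateText(model, prompt, options) signature of this release:

    import { generateText, mistral } from "modelfusion";

    const { text, finishReason, rawResponse } = await generateText(
      mistral.ChatTextGenerator({ model: "mistral-tiny" }).withTextPrompt(),
      "Why is the sky blue?",
      { fullResponse: true } // destructure `rawResponse`, not `response`, from 0.118.0 on
    );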
package/model-provider/mistral/MistralChatModel.d.ts
@@ -59,7 +59,7 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
  }): Promise<RESULT>;
  get settingsForEvent(): Partial<MistralChatModelSettings>;
  doGenerateTexts(prompt: MistralChatPrompt, options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  object: string;
  model: string;
  usage: {
@@ -84,7 +84,7 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
  }[];
  }>;
  restoreGeneratedTexts(rawResponse: unknown): {
- response: {
+ rawResponse: {
  object: string;
  model: string;
  usage: {
@@ -108,8 +108,8 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
  finishReason: TextGenerationFinishReason;
  }[];
  };
- processTextGenerationResponse(response: MistralChatResponse): {
- response: {
+ processTextGenerationResponse(rawResponse: MistralChatResponse): {
+ rawResponse: {
  object: string;
  model: string;
  usage: {
package/model-provider/mistral/MistralChatModel.js
@@ -94,10 +94,10 @@ export class MistralChatModel extends AbstractModel {
  schema: zodSchema(mistralChatResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
- textGenerationResults: response.choices.map((choice) => ({
+ rawResponse,
+ textGenerationResults: rawResponse.choices.map((choice) => ({
  text: choice.message.content,
  finishReason: this.translateFinishReason(choice.finish_reason),
  })),
package/model-provider/mistral/MistralTextEmbeddingModel.cjs
@@ -79,10 +79,10 @@ class MistralTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
  };
  }
  async doEmbedValues(texts, options) {
- const response = await this.callAPI(texts, options);
+ const rawResponse = await this.callAPI(texts, options);
  return {
- response,
- embeddings: response.data.map((entry) => entry.embedding),
+ rawResponse,
+ embeddings: rawResponse.data.map((entry) => entry.embedding),
  };
  }
  withSettings(additionalSettings) {
package/model-provider/mistral/MistralTextEmbeddingModel.d.ts
@@ -30,7 +30,7 @@ export declare class MistralTextEmbeddingModel extends AbstractModel<MistralText
  callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<MistralTextEmbeddingResponse>;
  get settingsForEvent(): Partial<MistralTextEmbeddingModelSettings>;
  doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  object: string;
  model: string;
  usage: {
package/model-provider/mistral/MistralTextEmbeddingModel.js
@@ -76,10 +76,10 @@ export class MistralTextEmbeddingModel extends AbstractModel {
  };
  }
  async doEmbedValues(texts, options) {
- const response = await this.callAPI(texts, options);
+ const rawResponse = await this.callAPI(texts, options);
  return {
- response,
- embeddings: response.data.map((entry) => entry.embedding),
+ rawResponse,
+ embeddings: rawResponse.data.map((entry) => entry.embedding),
  };
  }
  withSettings(additionalSettings) {
package/model-provider/ollama/OllamaChatModel.cjs
@@ -129,12 +129,12 @@ class OllamaChatModel extends AbstractModel_js_1.AbstractModel {
  schema: (0, ZodSchema_js_1.zodSchema)(ollamaChatResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
+ rawResponse,
  textGenerationResults: [
  {
- text: response.message.content,
+ text: rawResponse.message.content,
  finishReason: "unknown",
  },
  ],
package/model-provider/ollama/OllamaChatModel.d.ts
@@ -39,7 +39,7 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
  }): Promise<RESPONSE>;
  get settingsForEvent(): Partial<OllamaChatModelSettings>;
  doGenerateTexts(prompt: OllamaChatPrompt, options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  model: string;
  message: {
  role: string;
@@ -60,7 +60,7 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
  }[];
  }>;
  restoreGeneratedTexts(rawResponse: unknown): {
- response: {
+ rawResponse: {
  model: string;
  message: {
  role: string;
package/model-provider/ollama/OllamaChatModel.js
@@ -126,12 +126,12 @@ export class OllamaChatModel extends AbstractModel {
  schema: zodSchema(ollamaChatResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
+ rawResponse,
  textGenerationResults: [
  {
- text: response.message.content,
+ text: rawResponse.message.content,
  finishReason: "unknown",
  },
  ],
package/model-provider/ollama/OllamaCompletionModel.cjs
@@ -130,12 +130,12 @@ class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
  schema: (0, ZodSchema_js_1.zodSchema)(ollamaCompletionResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
+ rawResponse,
  textGenerationResults: [
  {
- text: response.response,
+ text: rawResponse.response,
  finishReason: "unknown",
  },
  ],
package/model-provider/ollama/OllamaCompletionModel.d.ts
@@ -52,10 +52,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
  }): Promise<RESPONSE>;
  get settingsForEvent(): Partial<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>;
  doGenerateTexts(prompt: OllamaCompletionPrompt, options: FunctionCallOptions): Promise<{
- response: {
- response: string;
+ rawResponse: {
  model: string;
  done: true;
+ response: string;
  created_at: string;
  total_duration: number;
  prompt_eval_count: number;
@@ -71,10 +71,10 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
  }[];
  }>;
  restoreGeneratedTexts(rawResponse: unknown): {
- response: {
- response: string;
+ rawResponse: {
  model: string;
  done: true;
+ response: string;
  created_at: string;
  total_duration: number;
  prompt_eval_count: number;
@@ -89,11 +89,11 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
  finishReason: "unknown";
  }[];
  };
- processTextGenerationResponse(response: OllamaCompletionResponse): {
- response: {
- response: string;
+ processTextGenerationResponse(rawResponse: OllamaCompletionResponse): {
+ rawResponse: {
  model: string;
  done: true;
+ response: string;
  created_at: string;
  total_duration: number;
  prompt_eval_count: number;
@@ -109,9 +109,9 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
  }[];
  };
  doStreamText(prompt: OllamaCompletionPrompt, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
- response: string;
  model: string;
  done: false;
+ response: string;
  created_at: string;
  } | {
  model: string;
@@ -151,9 +151,9 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
  eval_duration: z.ZodNumber;
  context: z.ZodOptional<z.ZodArray<z.ZodNumber, "many">>;
  }, "strip", z.ZodTypeAny, {
- response: string;
  model: string;
  done: true;
+ response: string;
  created_at: string;
  total_duration: number;
  prompt_eval_count: number;
@@ -163,9 +163,9 @@ declare const ollamaCompletionResponseSchema: z.ZodObject<{
  prompt_eval_duration?: number | undefined;
  context?: number[] | undefined;
  }, {
- response: string;
  model: string;
  done: true;
+ response: string;
  created_at: string;
  total_duration: number;
  prompt_eval_count: number;
@@ -182,14 +182,14 @@ declare const ollamaCompletionStreamChunkSchema: z.ZodDiscriminatedUnion<"done",
  created_at: z.ZodString;
  response: z.ZodString;
  }, "strip", z.ZodTypeAny, {
- response: string;
  model: string;
  done: false;
+ response: string;
  created_at: string;
  }, {
- response: string;
  model: string;
  done: false;
+ response: string;
  created_at: string;
  }>, z.ZodObject<{
  done: z.ZodLiteral<true>;
@@ -247,9 +247,9 @@ export declare const OllamaCompletionResponseFormat: {
  requestBodyValues: unknown;
  response: Response;
  }) => Promise<{
- response: string;
  model: string;
  done: true;
+ response: string;
  created_at: string;
  total_duration: number;
  prompt_eval_count: number;
@@ -269,9 +269,9 @@ export declare const OllamaCompletionResponseFormat: {
  handler: ({ response }: {
  response: Response;
  }) => Promise<AsyncIterable<import("../../index.js").Delta<{
- response: string;
  model: string;
  done: false;
+ response: string;
  created_at: string;
  } | {
  model: string;
package/model-provider/ollama/OllamaCompletionModel.js
@@ -127,12 +127,12 @@ export class OllamaCompletionModel extends AbstractModel {
  schema: zodSchema(ollamaCompletionResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
+ rawResponse,
  textGenerationResults: [
  {
- text: response.response,
+ text: rawResponse.response,
  finishReason: "unknown",
  },
  ],
package/model-provider/ollama/OllamaTextEmbeddingModel.cjs
@@ -66,10 +66,10 @@ class OllamaTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
  };
  }
  async doEmbedValues(texts, options) {
- const response = await this.callAPI(texts, options);
+ const rawResponse = await this.callAPI(texts, options);
  return {
- response,
- embeddings: [response.embedding],
+ rawResponse,
+ embeddings: [rawResponse.embedding],
  };
  }
  withSettings(additionalSettings) {
package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts
@@ -19,7 +19,7 @@ export declare class OllamaTextEmbeddingModel extends AbstractModel<OllamaTextEm
  callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<OllamaTextEmbeddingResponse>;
  get settingsForEvent(): Partial<OllamaTextEmbeddingModelSettings>;
  doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  embedding: number[];
  };
  embeddings: number[][];
package/model-provider/ollama/OllamaTextEmbeddingModel.js
@@ -63,10 +63,10 @@ export class OllamaTextEmbeddingModel extends AbstractModel {
  };
  }
  async doEmbedValues(texts, options) {
- const response = await this.callAPI(texts, options);
+ const rawResponse = await this.callAPI(texts, options);
  return {
- response,
- embeddings: [response.embedding],
+ rawResponse,
+ embeddings: [rawResponse.embedding],
  };
  }
  withSettings(additionalSettings) {
package/model-provider/openai/AbstractOpenAIChatModel.cjs
@@ -90,14 +90,14 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  schema: (0, ZodSchema_js_1.zodSchema)(openAIChatResponseSchema),
  }));
  }
- processTextGenerationResponse(response) {
+ processTextGenerationResponse(rawResponse) {
  return {
- response,
- textGenerationResults: response.choices.map((choice) => ({
+ rawResponse,
+ textGenerationResults: rawResponse.choices.map((choice) => ({
  text: choice.message.content ?? "",
  finishReason: this.translateFinishReason(choice.finish_reason),
  })),
- usage: this.extractUsage(response),
+ usage: this.extractUsage(rawResponse),
  };
  }
  translateFinishReason(finishReason) {
@@ -133,7 +133,7 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  return firstChoice.delta.content ?? undefined;
  }
  async doGenerateToolCall(tool, prompt, options) {
- const response = await this.callAPI(prompt, options, {
+ const rawResponse = await this.callAPI(prompt, options, {
  responseFormat: exports.OpenAIChatResponseFormat.json,
  toolChoice: {
  type: "function",
@@ -150,20 +150,20 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  },
  ],
  });
- const toolCalls = response.choices[0]?.message.tool_calls;
+ const toolCalls = rawResponse.choices[0]?.message.tool_calls;
  return {
- response,
+ rawResponse,
  toolCall: toolCalls == null || toolCalls.length === 0
  ? null
  : {
  id: toolCalls[0].id,
  args: (0, parseJSON_js_1.parseJSON)({ text: toolCalls[0].function.arguments }),
  },
- usage: this.extractUsage(response),
+ usage: this.extractUsage(rawResponse),
  };
  }
  async doGenerateToolCalls(tools, prompt, options) {
- const response = await this.callAPI(prompt, options, {
+ const rawResponse = await this.callAPI(prompt, options, {
  responseFormat: exports.OpenAIChatResponseFormat.json,
  toolChoice: "auto",
  tools: tools.map((tool) => ({
@@ -175,16 +175,16 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  },
  })),
  });
- const message = response.choices[0]?.message;
+ const message = rawResponse.choices[0]?.message;
  return {
- response,
+ rawResponse,
  text: message.content ?? null,
  toolCalls: message.tool_calls?.map((toolCall) => ({
  id: toolCall.id,
  name: toolCall.function.name,
  args: (0, parseJSON_js_1.parseJSON)({ text: toolCall.function.arguments }),
  })) ?? null,
- usage: this.extractUsage(response),
+ usage: this.extractUsage(rawResponse),
  };
  }
  extractUsage(response) {
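doGenerateToolCall and doGenerateToolCalls follow the same pattern: the raw chat completion moves to rawResponse while the parsed toolCall/toolCalls and usage keep their shape. A caller-level sketch via generateToolCall (see the tool/generate-tool-call files in the list above); the getWeather tool is illustrative, and the positional signature plus fullResponse option are assumptions:

    import { generateToolCall, openai, zodSchema } from "modelfusion";
    import { z } from "zod";

    const { toolCall, rawResponse } = await generateToolCall(
      openai.ChatTextGenerator({ model: "gpt-3.5-turbo" }),
      {
        name: "getWeather", // hypothetical tool
        parameters: zodSchema(z.object({ city: z.string() })),
      },
      [{ role: "user", content: "What is the weather in Berlin?" }],
      { fullResponse: true } // raw chat completion now arrives as `rawResponse`
    );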
package/model-provider/openai/AbstractOpenAIChatModel.d.ts
@@ -90,7 +90,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  toolChoice?: AbstractOpenAIChatSettings["toolChoice"];
  }): Promise<RESULT>;
  doGenerateTexts(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  object: "chat.completion";
  model: string;
  usage: {
@@ -134,7 +134,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  };
  }>;
  restoreGeneratedTexts(rawResponse: unknown): {
- response: {
+ rawResponse: {
  object: "chat.completion";
  model: string;
  usage: {
@@ -177,8 +177,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  totalTokens: number;
  };
  };
- processTextGenerationResponse(response: OpenAIChatResponse): {
- response: {
+ processTextGenerationResponse(rawResponse: OpenAIChatResponse): {
+ rawResponse: {
  object: "chat.completion";
  model: string;
  usage: {
@@ -251,7 +251,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  }>>>;
  extractTextDelta(delta: unknown): string | undefined;
  doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  object: "chat.completion";
  model: string;
  usage: {
@@ -295,7 +295,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
  };
  }>;
  doGenerateToolCalls(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
- response: {
+ rawResponse: {
  object: "chat.completion";
  model: string;
  usage: {