modelfusion 0.117.0 → 0.119.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +10 -9
- package/core/getFunctionCallLogger.cjs +6 -6
- package/core/getFunctionCallLogger.js +6 -6
- package/model-function/ModelCallEvent.d.ts +1 -1
- package/model-function/embed/EmbeddingEvent.d.ts +1 -1
- package/model-function/embed/EmbeddingModel.d.ts +1 -1
- package/model-function/embed/embed.cjs +5 -5
- package/model-function/embed/embed.d.ts +2 -2
- package/model-function/embed/embed.js +5 -5
- package/model-function/executeStandardCall.cjs +3 -3
- package/model-function/executeStandardCall.d.ts +2 -2
- package/model-function/executeStandardCall.js +3 -3
- package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
- package/model-function/generate-image/ImageGenerationModel.d.ts +1 -1
- package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +1 -1
- package/model-function/generate-image/generateImage.cjs +2 -2
- package/model-function/generate-image/generateImage.d.ts +1 -1
- package/model-function/generate-image/generateImage.js +2 -2
- package/model-function/generate-speech/SpeechGenerationEvent.d.ts +1 -1
- package/model-function/generate-speech/generateSpeech.cjs +2 -2
- package/model-function/generate-speech/generateSpeech.d.ts +1 -1
- package/model-function/generate-speech/generateSpeech.js +2 -2
- package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +10 -1
- package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +1 -0
- package/model-function/generate-structure/StructureFromTextGenerationModel.js +10 -1
- package/model-function/generate-structure/StructureFromTextPromptTemplate.d.ts +12 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +1 -22
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +0 -5
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +1 -22
- package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
- package/model-function/generate-structure/generateStructure.cjs +2 -2
- package/model-function/generate-structure/generateStructure.d.ts +1 -1
- package/model-function/generate-structure/generateStructure.js +2 -2
- package/model-function/generate-structure/jsonStructurePrompt.cjs +4 -12
- package/model-function/generate-structure/jsonStructurePrompt.js +4 -12
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +2 -2
- package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +6 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +5 -2
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +6 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +6 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +6 -0
- package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
- package/model-function/generate-text/TextGenerationModel.d.ts +7 -4
- package/model-function/generate-text/generateText.cjs +3 -3
- package/model-function/generate-text/generateText.d.ts +1 -1
- package/model-function/generate-text/generateText.js +3 -3
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +8 -1
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +5 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +6 -0
- package/model-function/generate-text/prompt-template/PromptTemplateProvider.cjs +2 -0
- package/model-function/generate-text/prompt-template/PromptTemplateProvider.d.ts +8 -0
- package/model-function/generate-text/prompt-template/PromptTemplateProvider.js +1 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +34 -1
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +9 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +31 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +28 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +29 -1
- package/model-function/generate-text/prompt-template/index.cjs +1 -0
- package/model-function/generate-text/prompt-template/index.d.ts +1 -0
- package/model-function/generate-text/prompt-template/index.js +1 -0
- package/model-function/generate-transcription/TranscriptionEvent.d.ts +1 -1
- package/model-function/generate-transcription/TranscriptionModel.d.ts +1 -1
- package/model-function/generate-transcription/generateTranscription.cjs +1 -1
- package/model-function/generate-transcription/generateTranscription.d.ts +1 -1
- package/model-function/generate-transcription/generateTranscription.js +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +3 -3
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.js +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +5 -4
- package/model-provider/cohere/CohereTextGenerationModel.js +6 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +3 -3
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -1
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +3 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +6 -3
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +5 -4
- package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +6 -3
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +15 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -0
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +13 -0
- package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +40 -33
- package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +20 -9
- package/model-provider/llamacpp/LlamaCppCompletionModel.js +40 -33
- package/model-provider/llamacpp/LlamaCppFacade.cjs +4 -3
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -1
- package/model-provider/llamacpp/LlamaCppFacade.js +2 -1
- package/model-provider/llamacpp/LlamaCppGrammars.cjs +3 -1
- package/model-provider/llamacpp/LlamaCppGrammars.d.ts +1 -0
- package/model-provider/llamacpp/LlamaCppGrammars.js +1 -0
- package/model-provider/llamacpp/LlamaCppPrompt.cjs +59 -0
- package/model-provider/llamacpp/LlamaCppPrompt.d.ts +14 -0
- package/model-provider/llamacpp/LlamaCppPrompt.js +31 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +3 -3
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +1 -1
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +3 -3
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.cjs +113 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.d.ts +7 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.js +109 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.cjs +150 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.d.ts +1 -0
- package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.js +148 -0
- package/model-provider/llamacpp/index.cjs +2 -3
- package/model-provider/llamacpp/index.d.ts +1 -2
- package/model-provider/llamacpp/index.js +1 -2
- package/model-provider/mistral/MistralChatModel.cjs +6 -3
- package/model-provider/mistral/MistralChatModel.d.ts +5 -4
- package/model-provider/mistral/MistralChatModel.js +6 -3
- package/model-provider/mistral/MistralTextEmbeddingModel.cjs +3 -3
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +1 -1
- package/model-provider/mistral/MistralTextEmbeddingModel.js +3 -3
- package/model-provider/ollama/OllamaChatModel.cjs +3 -3
- package/model-provider/ollama/OllamaChatModel.d.ts +2 -2
- package/model-provider/ollama/OllamaChatModel.js +3 -3
- package/model-provider/ollama/OllamaCompletionModel.cjs +6 -3
- package/model-provider/ollama/OllamaCompletionModel.d.ts +15 -14
- package/model-provider/ollama/OllamaCompletionModel.js +6 -3
- package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +3 -3
- package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +1 -1
- package/model-provider/ollama/OllamaTextEmbeddingModel.js +3 -3
- package/model-provider/openai/AbstractOpenAIChatModel.cjs +12 -12
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +6 -6
- package/model-provider/openai/AbstractOpenAIChatModel.js +12 -12
- package/model-provider/openai/AbstractOpenAICompletionModel.cjs +9 -6
- package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +3 -2
- package/model-provider/openai/AbstractOpenAICompletionModel.js +9 -6
- package/model-provider/openai/OpenAIImageGenerationModel.cjs +3 -3
- package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
- package/model-provider/openai/OpenAIImageGenerationModel.js +3 -3
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -3
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -1
- package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -3
- package/model-provider/openai/OpenAITranscriptionModel.cjs +3 -3
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +1 -1
- package/model-provider/openai/OpenAITranscriptionModel.js +3 -3
- package/model-provider/stability/StabilityImageGenerationModel.cjs +3 -3
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
- package/model-provider/stability/StabilityImageGenerationModel.js +3 -3
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +3 -3
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +1 -1
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +3 -3
- package/package.json +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +2 -2
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +2 -2
- package/tool/generate-tool-call/ToolCallGenerationEvent.d.ts +1 -1
- package/tool/generate-tool-call/ToolCallGenerationModel.d.ts +1 -1
- package/tool/generate-tool-call/generateToolCall.cjs +2 -2
- package/tool/generate-tool-call/generateToolCall.js +2 -2
- package/tool/generate-tool-calls/TextGenerationToolCallsModel.cjs +2 -2
- package/tool/generate-tool-calls/TextGenerationToolCallsModel.d.ts +1 -1
- package/tool/generate-tool-calls/TextGenerationToolCallsModel.js +2 -2
- package/tool/generate-tool-calls/ToolCallsGenerationEvent.d.ts +1 -1
- package/tool/generate-tool-calls/ToolCallsGenerationModel.d.ts +1 -1
- package/tool/generate-tool-calls/generateToolCalls.cjs +2 -2
- package/tool/generate-tool-calls/generateToolCalls.d.ts +1 -1
- package/tool/generate-tool-calls/generateToolCalls.js +2 -2
@@ -5,14 +5,16 @@ import { zodSchema } from "../../core/schema/ZodSchema.js";
|
|
5
5
|
import { parseJSON } from "../../core/schema/parseJSON.js";
|
6
6
|
import { validateTypes } from "../../core/schema/validateTypes.js";
|
7
7
|
import { AbstractModel } from "../../model-function/AbstractModel.js";
|
8
|
+
import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
|
8
9
|
import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
|
9
10
|
import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
|
10
11
|
import { AsyncQueue } from "../../util/AsyncQueue.js";
|
11
12
|
import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
|
12
13
|
import { LlamaCppApiConfiguration } from "./LlamaCppApiConfiguration.js";
|
13
14
|
import { failedLlamaCppCallResponseHandler } from "./LlamaCppError.js";
|
15
|
+
import { Text } from "./LlamaCppPrompt.js";
|
14
16
|
import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
|
15
|
-
import {
|
17
|
+
import { convertJsonSchemaToGBNF } from "./convertJsonSchemaToGBNF.js";
|
16
18
|
export class LlamaCppCompletionModel extends AbstractModel {
|
17
19
|
constructor(settings = {}) {
|
18
20
|
super({ settings });
|
@@ -137,23 +139,23 @@ export class LlamaCppCompletionModel extends AbstractModel {
|
|
137
139
|
schema: zodSchema(llamaCppTextGenerationResponseSchema),
|
138
140
|
}));
|
139
141
|
}
|
140
|
-
processTextGenerationResponse(
|
142
|
+
processTextGenerationResponse(rawResponse) {
|
141
143
|
return {
|
142
|
-
|
144
|
+
rawResponse,
|
143
145
|
textGenerationResults: [
|
144
146
|
{
|
145
|
-
text:
|
146
|
-
finishReason:
|
147
|
+
text: rawResponse.content,
|
148
|
+
finishReason: rawResponse.stopped_eos || rawResponse.stopped_word
|
147
149
|
? "stop"
|
148
|
-
:
|
150
|
+
: rawResponse.stopped_limit
|
149
151
|
? "length"
|
150
152
|
: "unknown",
|
151
153
|
},
|
152
154
|
],
|
153
155
|
usage: {
|
154
|
-
promptTokens:
|
155
|
-
completionTokens:
|
156
|
-
totalTokens:
|
156
|
+
promptTokens: rawResponse.tokens_evaluated,
|
157
|
+
completionTokens: rawResponse.tokens_predicted,
|
158
|
+
totalTokens: rawResponse.tokens_evaluated + rawResponse.tokens_predicted,
|
157
159
|
},
|
158
160
|
};
|
159
161
|
}
|
@@ -165,33 +167,38 @@ export class LlamaCppCompletionModel extends AbstractModel {
|
|
165
167
|
extractTextDelta(delta) {
|
166
168
|
return delta.content;
|
167
169
|
}
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
170
|
+
asStructureGenerationModel(promptTemplate) {
|
171
|
+
return "adaptModel" in promptTemplate
|
172
|
+
? new StructureFromTextStreamingModel({
|
173
|
+
model: promptTemplate.adaptModel(this),
|
174
|
+
template: promptTemplate,
|
175
|
+
})
|
176
|
+
: new StructureFromTextStreamingModel({
|
177
|
+
model: this,
|
178
|
+
template: promptTemplate,
|
179
|
+
});
|
173
180
|
}
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
181
|
+
withJsonOutput(schema) {
|
182
|
+
// don't override the grammar if it's already set (to allow user to override)
|
183
|
+
if (this.settings.grammar != null) {
|
184
|
+
return this;
|
185
|
+
}
|
186
|
+
const grammar = convertJsonSchemaToGBNF(schema.getJsonSchema());
|
187
|
+
return this.withSettings({
|
188
|
+
grammar: grammar,
|
180
189
|
});
|
181
190
|
}
|
182
|
-
|
183
|
-
|
184
|
-
|
185
|
-
|
186
|
-
return
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
promptTemplate,
|
194
|
-
});
|
191
|
+
get promptTemplateProvider() {
|
192
|
+
return this.settings.promptTemplate ?? Text;
|
193
|
+
}
|
194
|
+
withTextPrompt() {
|
195
|
+
return this.withPromptTemplate(this.promptTemplateProvider.text());
|
196
|
+
}
|
197
|
+
withInstructionPrompt() {
|
198
|
+
return this.withPromptTemplate(this.promptTemplateProvider.instruction());
|
199
|
+
}
|
200
|
+
withChatPrompt() {
|
201
|
+
return this.withPromptTemplate(this.promptTemplateProvider.chat());
|
195
202
|
}
|
196
203
|
/**
|
197
204
|
* Maps the prompt for the full Llama.cpp prompt template (incl. image support).
|
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|
23
23
|
return result;
|
24
24
|
};
|
25
25
|
Object.defineProperty(exports, "__esModule", { value: true });
|
26
|
-
exports.grammar = exports.Tokenizer = exports.TextEmbedder = exports.
|
26
|
+
exports.prompt = exports.grammar = exports.Tokenizer = exports.TextEmbedder = exports.CompletionTextGenerator = exports.Api = void 0;
|
27
27
|
const LlamaCppApiConfiguration_js_1 = require("./LlamaCppApiConfiguration.cjs");
|
28
28
|
const LlamaCppCompletionModel_js_1 = require("./LlamaCppCompletionModel.cjs");
|
29
29
|
const LlamaCppTextEmbeddingModel_js_1 = require("./LlamaCppTextEmbeddingModel.cjs");
|
@@ -36,10 +36,10 @@ function Api(settings) {
|
|
36
36
|
return new LlamaCppApiConfiguration_js_1.LlamaCppApiConfiguration(settings);
|
37
37
|
}
|
38
38
|
exports.Api = Api;
|
39
|
-
function
|
39
|
+
function CompletionTextGenerator(settings = {}) {
|
40
40
|
return new LlamaCppCompletionModel_js_1.LlamaCppCompletionModel(settings);
|
41
41
|
}
|
42
|
-
exports.
|
42
|
+
exports.CompletionTextGenerator = CompletionTextGenerator;
|
43
43
|
function TextEmbedder(settings = {}) {
|
44
44
|
return new LlamaCppTextEmbeddingModel_js_1.LlamaCppTextEmbeddingModel(settings);
|
45
45
|
}
|
@@ -52,3 +52,4 @@ exports.Tokenizer = Tokenizer;
|
|
52
52
|
* GBNF grammars. You can use them in the `grammar` option of the `TextGenerator` model.
|
53
53
|
*/
|
54
54
|
exports.grammar = __importStar(require("./LlamaCppGrammars.cjs"));
|
55
|
+
exports.prompt = __importStar(require("./LlamaCppPrompt.cjs"));
|
@@ -9,10 +9,11 @@ import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
|
|
9
9
|
* It calls the API at http://127.0.0.1:8080 by default.
|
10
10
|
*/
|
11
11
|
export declare function Api(settings: PartialBaseUrlPartsApiConfigurationOptions): LlamaCppApiConfiguration;
|
12
|
-
export declare function
|
12
|
+
export declare function CompletionTextGenerator<CONTEXT_WINDOW_SIZE extends number>(settings?: LlamaCppCompletionModelSettings<CONTEXT_WINDOW_SIZE>): LlamaCppCompletionModel<CONTEXT_WINDOW_SIZE>;
|
13
13
|
export declare function TextEmbedder(settings?: LlamaCppTextEmbeddingModelSettings): LlamaCppTextEmbeddingModel;
|
14
14
|
export declare function Tokenizer(api?: ApiConfiguration): LlamaCppTokenizer;
|
15
15
|
/**
|
16
16
|
* GBNF grammars. You can use them in the `grammar` option of the `TextGenerator` model.
|
17
17
|
*/
|
18
18
|
export * as grammar from "./LlamaCppGrammars.js";
|
19
|
+
export * as prompt from "./LlamaCppPrompt.js";
|
@@ -9,7 +9,7 @@ import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
|
|
9
9
|
export function Api(settings) {
|
10
10
|
return new LlamaCppApiConfiguration(settings);
|
11
11
|
}
|
12
|
-
export function
|
12
|
+
export function CompletionTextGenerator(settings = {}) {
|
13
13
|
return new LlamaCppCompletionModel(settings);
|
14
14
|
}
|
15
15
|
export function TextEmbedder(settings = {}) {
|
@@ -22,3 +22,4 @@ export function Tokenizer(api = new LlamaCppApiConfiguration()) {
|
|
22
22
|
* GBNF grammars. You can use them in the `grammar` option of the `TextGenerator` model.
|
23
23
|
*/
|
24
24
|
export * as grammar from "./LlamaCppGrammars.js";
|
25
|
+
export * as prompt from "./LlamaCppPrompt.js";
|
@@ -1,6 +1,6 @@
|
|
1
1
|
"use strict";
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
-
exports.list = exports.jsonArray = exports.json = void 0;
|
3
|
+
exports.fromJsonSchema = exports.list = exports.jsonArray = exports.json = void 0;
|
4
4
|
/**
|
5
5
|
* GBNF grammar for JSON.
|
6
6
|
*
|
@@ -82,3 +82,5 @@ root ::= item+
|
|
82
82
|
# Excludes various line break characters
|
83
83
|
item ::= "- " [^\r\n\x0b\x0c\x85\u2028\u2029]+ "\n"
|
84
84
|
`;
|
85
|
+
var convertJsonSchemaToGBNF_1 = require("./convertJsonSchemaToGBNF");
|
86
|
+
Object.defineProperty(exports, "fromJsonSchema", { enumerable: true, get: function () { return convertJsonSchemaToGBNF_1.convertJsonSchemaToGBNF; } });
|
@@ -0,0 +1,59 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
+
if (k2 === undefined) k2 = k;
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
7
|
+
}
|
8
|
+
Object.defineProperty(o, k2, desc);
|
9
|
+
}) : (function(o, m, k, k2) {
|
10
|
+
if (k2 === undefined) k2 = k;
|
11
|
+
o[k2] = m[k];
|
12
|
+
}));
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
15
|
+
}) : function(o, v) {
|
16
|
+
o["default"] = v;
|
17
|
+
});
|
18
|
+
var __importStar = (this && this.__importStar) || function (mod) {
|
19
|
+
if (mod && mod.__esModule) return mod;
|
20
|
+
var result = {};
|
21
|
+
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
22
|
+
__setModuleDefault(result, mod);
|
23
|
+
return result;
|
24
|
+
};
|
25
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
26
|
+
exports.BakLLaVA1 = exports.Vicuna = exports.Alpaca = exports.NeuralChat = exports.Llama2 = exports.ChatML = exports.Mistral = exports.Text = exports.asLlamaCppTextPromptTemplateProvider = exports.asLlamaCppPromptTemplate = void 0;
|
27
|
+
const alpacaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs"));
|
28
|
+
const chatMlPrompt = __importStar(require("../../model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs"));
|
29
|
+
const llama2Prompt = __importStar(require("../../model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs"));
|
30
|
+
const mistralPrompt = __importStar(require("../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs"));
|
31
|
+
const neuralChatPrompt = __importStar(require("../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs"));
|
32
|
+
const textPrompt = __importStar(require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs"));
|
33
|
+
const vicunaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs"));
|
34
|
+
const LlamaCppBakLLaVA1Prompt = __importStar(require("./LlamaCppBakLLaVA1PromptTemplate.cjs"));
|
35
|
+
function asLlamaCppPromptTemplate(promptTemplate) {
|
36
|
+
return {
|
37
|
+
format: (prompt) => ({
|
38
|
+
text: promptTemplate.format(prompt),
|
39
|
+
}),
|
40
|
+
stopSequences: promptTemplate.stopSequences,
|
41
|
+
};
|
42
|
+
}
|
43
|
+
exports.asLlamaCppPromptTemplate = asLlamaCppPromptTemplate;
|
44
|
+
function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider) {
|
45
|
+
return {
|
46
|
+
text: () => asLlamaCppPromptTemplate(promptTemplateProvider.text()),
|
47
|
+
instruction: () => asLlamaCppPromptTemplate(promptTemplateProvider.instruction()),
|
48
|
+
chat: () => asLlamaCppPromptTemplate(promptTemplateProvider.chat()),
|
49
|
+
};
|
50
|
+
}
|
51
|
+
exports.asLlamaCppTextPromptTemplateProvider = asLlamaCppTextPromptTemplateProvider;
|
52
|
+
exports.Text = asLlamaCppTextPromptTemplateProvider(textPrompt);
|
53
|
+
exports.Mistral = asLlamaCppTextPromptTemplateProvider(mistralPrompt);
|
54
|
+
exports.ChatML = asLlamaCppTextPromptTemplateProvider(chatMlPrompt);
|
55
|
+
exports.Llama2 = asLlamaCppTextPromptTemplateProvider(llama2Prompt);
|
56
|
+
exports.NeuralChat = asLlamaCppTextPromptTemplateProvider(neuralChatPrompt);
|
57
|
+
exports.Alpaca = asLlamaCppTextPromptTemplateProvider(alpacaPrompt);
|
58
|
+
exports.Vicuna = asLlamaCppTextPromptTemplateProvider(vicunaPrompt);
|
59
|
+
exports.BakLLaVA1 = LlamaCppBakLLaVA1Prompt;
|
@@ -0,0 +1,14 @@
|
|
1
|
+
import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
|
2
|
+
import { TextGenerationPromptTemplateProvider } from "../../model-function/generate-text/prompt-template/PromptTemplateProvider.js";
|
3
|
+
import * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
|
4
|
+
import { LlamaCppCompletionPrompt } from "./LlamaCppCompletionModel.js";
|
5
|
+
export declare function asLlamaCppPromptTemplate<SOURCE_PROMPT>(promptTemplate: TextGenerationPromptTemplate<SOURCE_PROMPT, string>): TextGenerationPromptTemplate<SOURCE_PROMPT, LlamaCppCompletionPrompt>;
|
6
|
+
export declare function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider: TextGenerationPromptTemplateProvider<string>): TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
7
|
+
export declare const Text: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
8
|
+
export declare const Mistral: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
9
|
+
export declare const ChatML: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
10
|
+
export declare const Llama2: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
11
|
+
export declare const NeuralChat: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
12
|
+
export declare const Alpaca: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
13
|
+
export declare const Vicuna: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
|
14
|
+
export declare const BakLLaVA1: typeof LlamaCppBakLLaVA1Prompt;
|
@@ -0,0 +1,31 @@
|
|
1
|
+
import * as alpacaPrompt from "../../model-function/generate-text/prompt-template/AlpacaPromptTemplate.js";
|
2
|
+
import * as chatMlPrompt from "../../model-function/generate-text/prompt-template/ChatMLPromptTemplate.js";
|
3
|
+
import * as llama2Prompt from "../../model-function/generate-text/prompt-template/Llama2PromptTemplate.js";
|
4
|
+
import * as mistralPrompt from "../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js";
|
5
|
+
import * as neuralChatPrompt from "../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js";
|
6
|
+
import * as textPrompt from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
|
7
|
+
import * as vicunaPrompt from "../../model-function/generate-text/prompt-template/VicunaPromptTemplate.js";
|
8
|
+
import * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
|
9
|
+
export function asLlamaCppPromptTemplate(promptTemplate) {
|
10
|
+
return {
|
11
|
+
format: (prompt) => ({
|
12
|
+
text: promptTemplate.format(prompt),
|
13
|
+
}),
|
14
|
+
stopSequences: promptTemplate.stopSequences,
|
15
|
+
};
|
16
|
+
}
|
17
|
+
export function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider) {
|
18
|
+
return {
|
19
|
+
text: () => asLlamaCppPromptTemplate(promptTemplateProvider.text()),
|
20
|
+
instruction: () => asLlamaCppPromptTemplate(promptTemplateProvider.instruction()),
|
21
|
+
chat: () => asLlamaCppPromptTemplate(promptTemplateProvider.chat()),
|
22
|
+
};
|
23
|
+
}
|
24
|
+
export const Text = asLlamaCppTextPromptTemplateProvider(textPrompt);
|
25
|
+
export const Mistral = asLlamaCppTextPromptTemplateProvider(mistralPrompt);
|
26
|
+
export const ChatML = asLlamaCppTextPromptTemplateProvider(chatMlPrompt);
|
27
|
+
export const Llama2 = asLlamaCppTextPromptTemplateProvider(llama2Prompt);
|
28
|
+
export const NeuralChat = asLlamaCppTextPromptTemplateProvider(neuralChatPrompt);
|
29
|
+
export const Alpaca = asLlamaCppTextPromptTemplateProvider(alpacaPrompt);
|
30
|
+
export const Vicuna = asLlamaCppTextPromptTemplateProvider(vicunaPrompt);
|
31
|
+
export const BakLLaVA1 = LlamaCppBakLLaVA1Prompt;
|
@@ -80,10 +80,10 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
|
|
80
80
|
};
|
81
81
|
}
|
82
82
|
async doEmbedValues(texts, options) {
|
83
|
-
const
|
83
|
+
const rawResponse = await this.callAPI(texts, options);
|
84
84
|
return {
|
85
|
-
|
86
|
-
embeddings: [
|
85
|
+
rawResponse,
|
86
|
+
embeddings: [rawResponse.embedding],
|
87
87
|
};
|
88
88
|
}
|
89
89
|
withSettings(additionalSettings) {
|
@@ -21,7 +21,7 @@ export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTe
|
|
21
21
|
callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<LlamaCppTextEmbeddingResponse>;
|
22
22
|
get settingsForEvent(): Partial<LlamaCppTextEmbeddingModelSettings>;
|
23
23
|
doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
|
24
|
-
|
24
|
+
rawResponse: {
|
25
25
|
embedding: number[];
|
26
26
|
};
|
27
27
|
embeddings: number[][];
|
@@ -77,10 +77,10 @@ export class LlamaCppTextEmbeddingModel extends AbstractModel {
|
|
77
77
|
};
|
78
78
|
}
|
79
79
|
async doEmbedValues(texts, options) {
|
80
|
-
const
|
80
|
+
const rawResponse = await this.callAPI(texts, options);
|
81
81
|
return {
|
82
|
-
|
83
|
-
embeddings: [
|
82
|
+
rawResponse,
|
83
|
+
embeddings: [rawResponse.embedding],
|
84
84
|
};
|
85
85
|
}
|
86
86
|
withSettings(additionalSettings) {
|
@@ -0,0 +1,113 @@
|
|
1
|
+
"use strict";
|
2
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
4
|
+
exports.convertJsonSchemaToGBNF = void 0;
|
5
|
+
/**
|
6
|
+
* Convert JSON Schema to a GBNF grammar.
|
7
|
+
*
|
8
|
+
* This is a modified version of
|
9
|
+
* https://github.com/ggerganov/llama.cpp/blob/master/examples/server/public/json-schema-to-grammar.mjs
|
10
|
+
*/
|
11
|
+
function convertJsonSchemaToGBNF(schema) {
|
12
|
+
const rules = new RuleMap();
|
13
|
+
rules.add("space", SPACE_RULE);
|
14
|
+
visit(schema, undefined, rules);
|
15
|
+
return rules.toGBNF();
|
16
|
+
}
|
17
|
+
exports.convertJsonSchemaToGBNF = convertJsonSchemaToGBNF;
|
18
|
+
const SPACE_RULE = '" "?';
|
19
|
+
const PRIMITIVE_RULES = {
|
20
|
+
boolean: '("true" | "false") space',
|
21
|
+
number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space',
|
22
|
+
integer: '("-"? ([0-9] | [1-9] [0-9]*)) space',
|
23
|
+
string: ` "\\"" ( [^"\\\\] | "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) )* "\\"" space`,
|
24
|
+
null: '"null" space',
|
25
|
+
};
|
26
|
+
class RuleMap {
|
27
|
+
constructor() {
|
28
|
+
Object.defineProperty(this, "rules", {
|
29
|
+
enumerable: true,
|
30
|
+
configurable: true,
|
31
|
+
writable: true,
|
32
|
+
value: new Map()
|
33
|
+
});
|
34
|
+
}
|
35
|
+
add(name, rule) {
|
36
|
+
const escapedName = this.escapeRuleName(name, rule);
|
37
|
+
this.rules.set(escapedName, rule);
|
38
|
+
return escapedName;
|
39
|
+
}
|
40
|
+
/**
|
41
|
+
* Replace invalid characters in rule name with hyphens.
|
42
|
+
* Disambiguate the name if it already exists.
|
43
|
+
*/
|
44
|
+
escapeRuleName(name, rule) {
|
45
|
+
const baseName = name.replace(/[^\dA-Za-z-]+/g, "-");
|
46
|
+
if (!this.rules.has(baseName) || this.rules.get(baseName) === rule) {
|
47
|
+
return baseName;
|
48
|
+
}
|
49
|
+
let i = 0;
|
50
|
+
while (this.rules.has(`${baseName}${i}`)) {
|
51
|
+
if (this.rules.get(`${baseName}${i}`) === rule) {
|
52
|
+
return `${baseName}${i}`;
|
53
|
+
}
|
54
|
+
i++;
|
55
|
+
}
|
56
|
+
return `${baseName}${i}`;
|
57
|
+
}
|
58
|
+
toGBNF() {
|
59
|
+
return Array.from(this.rules)
|
60
|
+
.map(([name, rule]) => `${name} ::= ${rule}`)
|
61
|
+
.join("\n");
|
62
|
+
}
|
63
|
+
}
|
64
|
+
const GRAMMAR_LITERAL_ESCAPES = {
|
65
|
+
"\r": "\\r",
|
66
|
+
"\n": "\\n",
|
67
|
+
'"': '\\"',
|
68
|
+
};
|
69
|
+
function formatLiteral(literal) {
|
70
|
+
const escaped = JSON.stringify(literal).replace(/[\n\r"]/g, (m) => GRAMMAR_LITERAL_ESCAPES[m]);
|
71
|
+
return `"${escaped}"`;
|
72
|
+
}
|
73
|
+
/**
 * Recursively translate a JSON Schema node into GBNF grammar rules.
 *
 * Registers one rule per visited node in `rules` and returns the name under
 * which the rule was registered, so parent rules can reference it.
 * Supported constructs: oneOf/anyOf, const, enum, object (with properties),
 * array (with items), and the primitive types in PRIMITIVE_RULES.
 *
 * @param schema JSON Schema node
 * @param name rule-name prefix for this node; undefined at the root
 * @param rules RuleMap collecting the generated rules
 * @returns the registered rule name for this node
 * @throws Error when the schema uses an unsupported construct
 */
function visit(schema, name, rules) {
    const schemaType = schema.type;
    const ruleName = name || "root";
    if (schema.oneOf || schema.anyOf) {
        // Union: one alternative rule per sub-schema, joined with `|`.
        // FIX: prefix with ruleName — the original interpolated `name`
        // directly, which stringified `undefined` at the root and produced
        // rule names like "undefined0". Non-root calls are unchanged.
        const rule = (schema.oneOf || schema.anyOf)
            .map((altSchema, i) => visit(altSchema, `${ruleName}-${i}`, rules))
            .join(" | ");
        return rules.add(ruleName, rule);
    }
    else if ("const" in schema) {
        // Constant: match exactly one literal value.
        return rules.add(ruleName, formatLiteral(schema.const));
    }
    else if ("enum" in schema) {
        // Enum: match any one of the listed literal values.
        const rule = schema.enum.map(formatLiteral).join(" | ");
        return rules.add(ruleName, rule);
    }
    else if (schemaType === "object" && "properties" in schema) {
        // Object: properties emitted in declaration order, all required.
        const propPairs = Object.entries(schema.properties);
        let rule = '"{" space';
        propPairs.forEach(([propName, propSchema], i) => {
            const propRuleName = visit(propSchema, `${name ?? ""}${name ? "-" : ""}${propName}`, rules);
            if (i > 0) {
                rule += ' "," space';
            }
            rule += ` ${formatLiteral(propName)} space ":" space ${propRuleName}`;
        });
        rule += ' "}" space';
        return rules.add(ruleName, rule);
    }
    else if (schemaType === "array" && "items" in schema) {
        // Array: zero or more homogeneous items, comma-separated.
        const itemRuleName = visit(schema.items, `${name ?? ""}${name ? "-" : ""}item`, rules);
        const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`;
        return rules.add(ruleName, rule);
    }
    else {
        // Primitive: reuse the shared rule named after the type (the root
        // keeps the name "root" so the grammar has an entry point).
        if (!PRIMITIVE_RULES[schemaType]) {
            throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`);
        }
        return rules.add(ruleName === "root" ? "root" : schemaType, PRIMITIVE_RULES[schemaType]);
    }
}
|
@@ -0,0 +1,7 @@
|
|
1
|
+
/**
 * Convert JSON Schema to a GBNF grammar.
 *
 * This is a modified version of
 * https://github.com/ggerganov/llama.cpp/blob/master/examples/server/public/json-schema-to-grammar.mjs
 *
 * @param schema - a JSON Schema object; the implementation supports
 *   oneOf/anyOf, const, enum, object (properties), array (items), and
 *   primitive types — unsupported constructs throw.
 * @returns the grammar as GBNF text, one `name ::= rule` line per rule,
 *   with `root` as the entry rule.
 */
export declare function convertJsonSchemaToGBNF(schema: unknown): string;
|
@@ -0,0 +1,109 @@
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Convert JSON Schema to a GBNF grammar.
 *
 * This is a modified version of
 * https://github.com/ggerganov/llama.cpp/blob/master/examples/server/public/json-schema-to-grammar.mjs
 *
 * @param schema a JSON Schema object
 * @returns the grammar as GBNF text, one `name ::= rule` line per rule
 */
export function convertJsonSchemaToGBNF(schema) {
    const ruleMap = new RuleMap();
    // Every generated rule references `space`, so register it up front.
    ruleMap.add("space", SPACE_RULE);
    visit(schema, undefined, ruleMap);
    return ruleMap.toGBNF();
}
|
14
|
+
// A single optional space, emitted after every token the grammar produces.
const SPACE_RULE = '" "?';
// GBNF rules for the JSON primitive types, keyed by JSON Schema type name.
// `string` accepts standard JSON escapes (\" \\ \/ \b \f \n \r \t \uXXXX).
const PRIMITIVE_RULES = {
    boolean: '("true" | "false") space',
    number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space',
    integer: '("-"? ([0-9] | [1-9] [0-9]*)) space',
    string: ` "\\"" ( [^"\\\\] | "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) )* "\\"" space`,
    null: '"null" space',
};
|
22
|
+
/**
 * Registry of named GBNF rules that keeps rule names unique.
 *
 * Names are sanitized (runs of invalid characters become a hyphen) and
 * disambiguated with a numeric suffix when the same name is registered
 * with a different rule body.
 */
class RuleMap {
    constructor() {
        // Insertion-ordered name -> rule text.
        this.rules = new Map();
    }
    /**
     * Register `rule` under a sanitized, unique variant of `name`.
     *
     * @param name proposed rule name
     * @param rule GBNF rule body
     * @returns the name actually used to store the rule
     */
    add(name, rule) {
        const finalName = this.escapeRuleName(name, rule);
        this.rules.set(finalName, rule);
        return finalName;
    }
    /**
     * Replace invalid characters in rule name with hyphens.
     * Disambiguate the name if it already exists.
     */
    escapeRuleName(name, rule) {
        const baseName = name.replace(/[^\dA-Za-z-]+/g, "-");
        // Free name, or the same rule re-registered: keep the base name.
        if (!this.rules.has(baseName) || this.rules.get(baseName) === rule) {
            return baseName;
        }
        // Otherwise pick the first numeric suffix whose slot is free or
        // already holds this exact rule.
        let suffix = 0;
        while (this.rules.has(`${baseName}${suffix}`) &&
            this.rules.get(`${baseName}${suffix}`) !== rule) {
            suffix += 1;
        }
        return `${baseName}${suffix}`;
    }
    /** Serialize all rules as newline-separated `name ::= rule` lines. */
    toGBNF() {
        const lines = [];
        for (const [name, rule] of this.rules) {
            lines.push(`${name} ::= ${rule}`);
        }
        return lines.join("\n");
    }
}
|
60
|
+
// Characters that must be escaped inside a GBNF double-quoted literal,
// mapped to their escape sequences (consumed by formatLiteral below).
const GRAMMAR_LITERAL_ESCAPES = {
    "\r": "\\r",
    "\n": "\\n",
    '"': '\\"',
};
|
65
|
+
/**
 * Format a JSON value as a GBNF string literal.
 *
 * The value is JSON-encoded first, then any characters that are special
 * inside a GBNF double-quoted literal are escaped via
 * GRAMMAR_LITERAL_ESCAPES.
 *
 * @param literal value to embed (string, number, boolean, or null)
 * @returns a quoted GBNF literal, e.g. `"\"foo\""` for the string "foo"
 */
function formatLiteral(literal) {
    const json = JSON.stringify(literal);
    const escaped = json.replace(/[\n\r"]/g, (char) => GRAMMAR_LITERAL_ESCAPES[char]);
    return `"${escaped}"`;
}
|
69
|
+
/**
 * Recursively translate a JSON Schema node into GBNF grammar rules.
 *
 * Registers one rule per visited node in `rules` and returns the name under
 * which the rule was registered, so parent rules can reference it.
 * Supported constructs: oneOf/anyOf, const, enum, object (with properties),
 * array (with items), and the primitive types in PRIMITIVE_RULES.
 *
 * @param schema JSON Schema node
 * @param name rule-name prefix for this node; undefined at the root
 * @param rules RuleMap collecting the generated rules
 * @returns the registered rule name for this node
 * @throws Error when the schema uses an unsupported construct
 */
function visit(schema, name, rules) {
    const schemaType = schema.type;
    const ruleName = name || "root";
    if (schema.oneOf || schema.anyOf) {
        // Union: one alternative rule per sub-schema, joined with `|`.
        // FIX: prefix with ruleName — the original interpolated `name`
        // directly, which stringified `undefined` at the root and produced
        // rule names like "undefined0". Non-root calls are unchanged.
        const rule = (schema.oneOf || schema.anyOf)
            .map((altSchema, i) => visit(altSchema, `${ruleName}-${i}`, rules))
            .join(" | ");
        return rules.add(ruleName, rule);
    }
    else if ("const" in schema) {
        // Constant: match exactly one literal value.
        return rules.add(ruleName, formatLiteral(schema.const));
    }
    else if ("enum" in schema) {
        // Enum: match any one of the listed literal values.
        const rule = schema.enum.map(formatLiteral).join(" | ");
        return rules.add(ruleName, rule);
    }
    else if (schemaType === "object" && "properties" in schema) {
        // Object: properties emitted in declaration order, all required.
        const propPairs = Object.entries(schema.properties);
        let rule = '"{" space';
        propPairs.forEach(([propName, propSchema], i) => {
            const propRuleName = visit(propSchema, `${name ?? ""}${name ? "-" : ""}${propName}`, rules);
            if (i > 0) {
                rule += ' "," space';
            }
            rule += ` ${formatLiteral(propName)} space ":" space ${propRuleName}`;
        });
        rule += ' "}" space';
        return rules.add(ruleName, rule);
    }
    else if (schemaType === "array" && "items" in schema) {
        // Array: zero or more homogeneous items, comma-separated.
        const itemRuleName = visit(schema.items, `${name ?? ""}${name ? "-" : ""}item`, rules);
        const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`;
        return rules.add(ruleName, rule);
    }
    else {
        // Primitive: reuse the shared rule named after the type (the root
        // keeps the name "root" so the grammar has an entry point).
        if (!PRIMITIVE_RULES[schemaType]) {
            throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`);
        }
        return rules.add(ruleName === "root" ? "root" : schemaType, PRIMITIVE_RULES[schemaType]);
    }
}
|