modelfusion 0.104.0 → 0.106.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +8 -10
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +7 -4
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +42 -0
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +27 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.js +41 -1
- package/model-function/generate-text/prompt-template/{Content.cjs → ContentPart.cjs} +1 -1
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/{Content.js → ContentPart.js} +1 -1
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +3 -2
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +6 -3
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +7 -3
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +6 -2
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +7 -3
- package/model-provider/anthropic/AnthropicPromptTemplate.js +5 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -2
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +49 -15
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +6 -1
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +6 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +171 -20
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +47 -14
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +11 -4
- package/model-provider/mistral/MistralPromptTemplate.js +9 -2
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +67 -14
- package/model-provider/ollama/OllamaChatModel.js +8 -44
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +34 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +34 -4
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +67 -10
- package/model-provider/ollama/OllamaCompletionModel.js +24 -45
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +95 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +72 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/{chat/OpenAIChatPromptTemplate.d.ts → OpenAIChatPromptTemplate.d.ts} +3 -3
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +27 -10
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +17 -17
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -7
- package/model-provider/openai/index.d.ts +5 -7
- package/model-provider/openai/index.js +5 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +13 -24
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js +1 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -25
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -70
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -63
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → llamacpp/LlamaCppTextGenerationModel.test.d.ts} +0 -0
- /package/{server/fastify/AssetStorage.js → model-provider/mistral/MistralChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Flow.js → model-provider/ollama/OllamaChatModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/{server/fastify/FlowSchema.js → model-provider/openai/OpenAIChatModel.test.d.ts} +0 -0
- /package/{server/fastify/Logger.js → model-provider/openai/OpenAICompletionModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0

package/model-function/generate-structure/jsonStructurePrompt.cjs
@@ -2,10 +2,46 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.jsonStructurePrompt = void 0;
 const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
-
-
-
-
-
+const DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+const DEFAULT_SCHEMA_SUFFIX = "\nYou MUST answer with a JSON object matches the above schema.";
+exports.jsonStructurePrompt = {
+    custom(createPrompt) {
+        return { createPrompt, extractStructure };
+    },
+    text({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({ schema, schemaPrefix, schemaSuffix }),
+                instruction: prompt,
+            }),
+            extractStructure,
+        };
+    },
+    instruction({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({
+                    originalSystemPrompt: prompt.system,
+                    schema,
+                    schemaPrefix,
+                    schemaSuffix,
+                }),
+                instruction: prompt.instruction,
+            }),
+            extractStructure,
+        };
+    },
+};
+function createSystemPrompt({ originalSystemPrompt, schema, schemaPrefix = DEFAULT_SCHEMA_PREFIX, schemaSuffix = DEFAULT_SCHEMA_SUFFIX, }) {
+    return [
+        originalSystemPrompt,
+        schemaPrefix,
+        JSON.stringify(schema.getJsonSchema()),
+        schemaSuffix,
+    ]
+        .filter(Boolean)
+        .join("\n");
+}
+function extractStructure(response) {
+    return (0, parseJSON_js_1.parseJSON)({ text: response });
 }
-exports.jsonStructurePrompt = jsonStructurePrompt;

package/model-function/generate-structure/jsonStructurePrompt.d.ts
@@ -1,4 +1,15 @@
 import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
 import { Schema } from "../../core/schema/Schema.js";
+import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
 import { StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
-export declare
+export declare const jsonStructurePrompt: {
+    custom<SOURCE_PROMPT, TARGET_PROMPT>(createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT): StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
+    text({ schemaPrefix, schemaSuffix, }?: {
+        schemaPrefix?: string | undefined;
+        schemaSuffix?: string | undefined;
+    }): StructureFromTextPromptTemplate<string, InstructionPrompt>;
+    instruction({ schemaPrefix, schemaSuffix, }?: {
+        schemaPrefix?: string | undefined;
+        schemaSuffix?: string | undefined;
+    }): StructureFromTextPromptTemplate<InstructionPrompt, InstructionPrompt>;
+};

package/model-function/generate-structure/jsonStructurePrompt.js
@@ -1,7 +1,44 @@
 import { parseJSON } from "../../core/schema/parseJSON.js";
-
-
-
-
-
+const DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+const DEFAULT_SCHEMA_SUFFIX = "\nYou MUST answer with a JSON object matches the above schema.";
+export const jsonStructurePrompt = {
+    custom(createPrompt) {
+        return { createPrompt, extractStructure };
+    },
+    text({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({ schema, schemaPrefix, schemaSuffix }),
+                instruction: prompt,
+            }),
+            extractStructure,
+        };
+    },
+    instruction({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({
+                    originalSystemPrompt: prompt.system,
+                    schema,
+                    schemaPrefix,
+                    schemaSuffix,
+                }),
+                instruction: prompt.instruction,
+            }),
+            extractStructure,
+        };
+    },
+};
+function createSystemPrompt({ originalSystemPrompt, schema, schemaPrefix = DEFAULT_SCHEMA_PREFIX, schemaSuffix = DEFAULT_SCHEMA_SUFFIX, }) {
+    return [
+        originalSystemPrompt,
+        schemaPrefix,
+        JSON.stringify(schema.getJsonSchema()),
+        schemaSuffix,
+    ]
+        .filter(Boolean)
+        .join("\n");
+}
+function extractStructure(response) {
+    return parseJSON({ text: response });
 }
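
For orientation, here is a minimal usage sketch (not part of the diff) of the new `jsonStructurePrompt.text()` helper combined with `asStructureGenerationModel`, `streamStructure`, and `zodSchema`, all of which appear in hunks in this diff. The `instructionModel` placeholder and the top-level `modelfusion` export names are assumptions:

```ts
import { jsonStructurePrompt, streamStructure, zodSchema } from "modelfusion";
import { z } from "zod";

// Hypothetical placeholder: any TextStreamingModel that accepts instruction prompts,
// e.g. a provider model wrapped with an instruction prompt template.
declare const instructionModel: any;

// jsonStructurePrompt.text() injects the JSON schema into the system prompt
// and parses the accumulated response text with parseJSON (see the hunks above).
const structureModel = instructionModel.asStructureGenerationModel(
  jsonStructurePrompt.text()
);

const structureStream = await streamStructure(
  structureModel,
  zodSchema(z.object({ name: z.string(), profession: z.string() })),
  "Generate a character for a fantasy role-playing game."
);

for await (const part of structureStream) {
  console.log(part); // StructureStreamPart with the latest partial structure
}
```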

package/model-function/generate-structure/streamStructure.cjs
@@ -8,8 +8,8 @@ async function streamStructure(model, schema, prompt, options) {
     const expandedPrompt = typeof prompt === "function"
         ? prompt(schema)
         : prompt;
+    let accumulatedText = "";
     let lastStructure;
-    let lastFullDelta;
     const fullResponse = await (0, executeStreamCall_js_1.executeStreamCall)({
         functionType: "stream-structure",
         input: prompt,
@@ -17,11 +17,14 @@ async function streamStructure(model, schema, prompt, options) {
         options,
         startStream: async (options) => model.doStreamStructure(schema, expandedPrompt, options),
         processDelta: (delta) => {
-            const
-
+            const textDelta = model.extractStructureTextDelta(delta.deltaValue);
+            if (textDelta == null) {
+                return undefined;
+            }
+            accumulatedText += textDelta;
+            const latestStructure = model.parseAccumulatedStructureText(accumulatedText);
             // only send a new part into the stream when the partial structure has changed:
             if (!(0, isDeepEqualData_js_1.isDeepEqualData)(lastStructure, latestStructure)) {
-                lastFullDelta = latestFullDelta;
                 lastStructure = latestStructure;
                 return {
                     isComplete: false,
@@ -42,10 +45,6 @@ async function streamStructure(model, schema, prompt, options) {
                 value: parseResult.data,
             };
         },
-        getResult: () => ({
-            response: lastFullDelta,
-            value: lastStructure,
-        }),
     });
     return options?.fullResponse
         ? {

package/model-function/generate-structure/streamStructure.d.ts
@@ -27,7 +27,7 @@ export type StructureStreamPart<STRUCTURE> = {
  * @example
  * const structureStream = await streamStructure(
  *   openai.ChatTextGenerator(...).asFunctionCallStructureGenerationModel(...),
- *
+ *   zodSchema(
  *     z.array(
  *       z.object({
  *         name: z.string(),

package/model-function/generate-structure/streamStructure.js
@@ -5,8 +5,8 @@ export async function streamStructure(model, schema, prompt, options) {
     const expandedPrompt = typeof prompt === "function"
         ? prompt(schema)
         : prompt;
+    let accumulatedText = "";
     let lastStructure;
-    let lastFullDelta;
     const fullResponse = await executeStreamCall({
         functionType: "stream-structure",
         input: prompt,
@@ -14,11 +14,14 @@ export async function streamStructure(model, schema, prompt, options) {
         options,
         startStream: async (options) => model.doStreamStructure(schema, expandedPrompt, options),
         processDelta: (delta) => {
-            const
-
+            const textDelta = model.extractStructureTextDelta(delta.deltaValue);
+            if (textDelta == null) {
+                return undefined;
+            }
+            accumulatedText += textDelta;
+            const latestStructure = model.parseAccumulatedStructureText(accumulatedText);
             // only send a new part into the stream when the partial structure has changed:
             if (!isDeepEqualData(lastStructure, latestStructure)) {
-                lastFullDelta = latestFullDelta;
                 lastStructure = latestStructure;
                 return {
                     isComplete: false,
@@ -39,10 +42,6 @@ export async function streamStructure(model, schema, prompt, options) {
                 value: parseResult.data,
             };
         },
-        getResult: () => ({
-            response: lastFullDelta,
-            value: lastStructure,
-        }),
     });
     return options?.fullResponse
         ? {

package/model-function/generate-text/PromptTemplateFullTextModel.cjs
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PromptTemplateFullTextModel = void 0;
+const PromptTemplateTextStreamingModel_js_1 = require("./PromptTemplateTextStreamingModel.cjs");
+class PromptTemplateFullTextModel extends PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel {
+    constructor(options) {
+        super(options);
+    }
+    doGenerateToolCall(tool, prompt, options) {
+        const mappedPrompt = this.promptTemplate.format(prompt);
+        return this.model.doGenerateToolCall(tool, mappedPrompt, options);
+    }
+    doGenerateToolCallsOrText(tools, prompt, options) {
+        const mappedPrompt = this.promptTemplate.format(prompt);
+        return this.model.doGenerateToolCallsOrText(tools, mappedPrompt, options);
+    }
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateFullTextModel({
+            model: this.withSettings({
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptTemplate.stopSequences,
+                ],
+            }),
+            promptTemplate,
+        });
+    }
+    withSettings(additionalSettings) {
+        return new PromptTemplateFullTextModel({
+            model: this.model.withSettings(additionalSettings),
+            promptTemplate: this.promptTemplate,
+        });
+    }
+}
+exports.PromptTemplateFullTextModel = PromptTemplateFullTextModel;

package/model-function/generate-text/PromptTemplateFullTextModel.d.ts
@@ -0,0 +1,41 @@
+import { FunctionOptions } from "../../core/FunctionOptions.js";
+import { ToolDefinition } from "../../tool/ToolDefinition.js";
+import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
+import { ToolCallsOrTextGenerationModel } from "../../tool/generate-tool-calls-or-text/ToolCallsOrTextGenerationModel.js";
+import { PromptTemplateTextStreamingModel } from "./PromptTemplateTextStreamingModel.js";
+import { TextGenerationModelSettings, TextStreamingModel } from "./TextGenerationModel.js";
+import { TextGenerationPromptTemplate } from "./TextGenerationPromptTemplate.js";
+export declare class PromptTemplateFullTextModel<PROMPT, MODEL_PROMPT, SETTINGS extends TextGenerationModelSettings, MODEL extends TextStreamingModel<MODEL_PROMPT, SETTINGS> & ToolCallGenerationModel<MODEL_PROMPT, SETTINGS> & ToolCallsOrTextGenerationModel<MODEL_PROMPT, SETTINGS>> extends PromptTemplateTextStreamingModel<PROMPT, MODEL_PROMPT, SETTINGS, MODEL> implements TextStreamingModel<PROMPT, SETTINGS>, ToolCallGenerationModel<PROMPT, SETTINGS>, ToolCallsOrTextGenerationModel<PROMPT, SETTINGS> {
+    constructor(options: {
+        model: MODEL;
+        promptTemplate: TextGenerationPromptTemplate<PROMPT, MODEL_PROMPT>;
+    });
+    doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: PROMPT, options?: FunctionOptions | undefined): PromiseLike<{
+        response: unknown;
+        toolCall: {
+            id: string;
+            args: unknown;
+        } | null;
+        usage?: {
+            promptTokens: number;
+            completionTokens: number;
+            totalTokens: number;
+        } | undefined;
+    }>;
+    doGenerateToolCallsOrText(tools: ToolDefinition<string, unknown>[], prompt: PROMPT, options?: FunctionOptions | undefined): PromiseLike<{
+        response: unknown;
+        text: string | null;
+        toolCalls: {
+            id: string;
+            name: string;
+            args: unknown;
+        }[] | null;
+        usage?: {
+            promptTokens: number;
+            completionTokens: number;
+            totalTokens: number;
+        } | undefined;
+    }>;
+    withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateFullTextModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
+    withSettings(additionalSettings: Partial<SETTINGS>): this;
+}

package/model-function/generate-text/PromptTemplateFullTextModel.js
@@ -0,0 +1,31 @@
+import { PromptTemplateTextStreamingModel } from "./PromptTemplateTextStreamingModel.js";
+export class PromptTemplateFullTextModel extends PromptTemplateTextStreamingModel {
+    constructor(options) {
+        super(options);
+    }
+    doGenerateToolCall(tool, prompt, options) {
+        const mappedPrompt = this.promptTemplate.format(prompt);
+        return this.model.doGenerateToolCall(tool, mappedPrompt, options);
+    }
+    doGenerateToolCallsOrText(tools, prompt, options) {
+        const mappedPrompt = this.promptTemplate.format(prompt);
+        return this.model.doGenerateToolCallsOrText(tools, mappedPrompt, options);
+    }
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateFullTextModel({
+            model: this.withSettings({
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptTemplate.stopSequences,
+                ],
+            }),
+            promptTemplate,
+        });
+    }
+    withSettings(additionalSettings) {
+        return new PromptTemplateFullTextModel({
+            model: this.model.withSettings(additionalSettings),
+            promptTemplate: this.promptTemplate,
+        });
+    }
+}

package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts
@@ -1,6 +1,7 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js";
 import { StructureFromTextGenerationModel } from "../generate-structure/StructureFromTextGenerationModel.js";
 import { StructureFromTextPromptTemplate } from "../generate-structure/StructureFromTextPromptTemplate.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";

package/model-function/generate-text/PromptTemplateTextGenerationModel.js
@@ -1,5 +1,5 @@
 import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 import { StructureFromTextGenerationModel } from "../generate-structure/StructureFromTextGenerationModel.js";
 export class PromptTemplateTextGenerationModel {
     constructor({ model, promptTemplate, }) {

package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs
@@ -11,6 +11,9 @@ class PromptTemplateTextStreamingModel extends PromptTemplateTextGenerationModel
         const mappedPrompt = this.promptTemplate.format(prompt);
         return this.model.doStreamText(mappedPrompt, options);
     }
+    extractTextDelta(delta) {
+        return this.model.extractTextDelta(delta);
+    }
     asStructureGenerationModel(promptTemplate) {
         return new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
             model: this,

package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts
@@ -9,7 +9,8 @@ export declare class PromptTemplateTextStreamingModel<PROMPT, MODEL_PROMPT, SETT
         model: MODEL;
         promptTemplate: TextGenerationPromptTemplate<PROMPT, MODEL_PROMPT>;
     });
-    doStreamText(prompt: PROMPT, options?: FunctionOptions): PromiseLike<AsyncIterable<import("../Delta.js").Delta<
+    doStreamText(prompt: PROMPT, options?: FunctionOptions): PromiseLike<AsyncIterable<import("../Delta.js").Delta<unknown>>>;
+    extractTextDelta(delta: unknown): string | undefined;
     asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, PROMPT>): StructureFromTextStreamingModel<INPUT_PROMPT, PROMPT, this>;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateTextStreamingModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
     withSettings(additionalSettings: Partial<SETTINGS>): this;

package/model-function/generate-text/PromptTemplateTextStreamingModel.js
@@ -8,6 +8,9 @@ export class PromptTemplateTextStreamingModel extends PromptTemplateTextGenerati
         const mappedPrompt = this.promptTemplate.format(prompt);
         return this.model.doStreamText(mappedPrompt, options);
     }
+    extractTextDelta(delta) {
+        return this.model.extractTextDelta(delta);
+    }
     asStructureGenerationModel(promptTemplate) {
         return new StructureFromTextStreamingModel({
             model: this,

package/model-function/generate-text/TextGenerationModel.d.ts
@@ -74,6 +74,7 @@ export interface TextGenerationModel<PROMPT, SETTINGS extends TextGenerationMode
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationModel<INPUT_PROMPT, SETTINGS>;
 }
 export interface TextStreamingModel<PROMPT, SETTINGS extends TextGenerationModelSettings = TextGenerationModelSettings> extends TextGenerationModel<PROMPT, SETTINGS> {
-    doStreamText(prompt: PROMPT, options?: FunctionOptions): PromiseLike<AsyncIterable<Delta<
+    doStreamText(prompt: PROMPT, options?: FunctionOptions): PromiseLike<AsyncIterable<Delta<unknown>>>;
+    extractTextDelta(delta: unknown): string | undefined;
     withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): TextStreamingModel<INPUT_PROMPT, SETTINGS>;
 }
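
With this change, `doStreamText` returns raw deltas and `extractTextDelta` maps each delta to a text chunk; `streamText` applies the extraction internally, so callers still iterate plain text. A hedged consumption sketch (the provider facade and model id are illustrative, following the README-style API of this release):

```ts
import { openai, streamText } from "modelfusion";

// streamText consumes the model's doStreamText() deltas and maps each one
// through extractTextDelta(), so the loop below receives plain text chunks.
const textStream = await streamText(
  openai.CompletionTextGenerator({ model: "gpt-3.5-turbo-instruct" }), // illustrative model id
  "Write a haiku about streaming deltas:"
);

for await (const textPart of textStream) {
  process.stdout.write(textPart);
}
```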

package/model-function/generate-text/index.cjs
@@ -14,6 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+__exportStar(require("./PromptTemplateFullTextModel.cjs"), exports);
 __exportStar(require("./PromptTemplateTextGenerationModel.cjs"), exports);
 __exportStar(require("./PromptTemplateTextStreamingModel.cjs"), exports);
 __exportStar(require("./TextGenerationEvent.cjs"), exports);

package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.instruction = exports.text = void 0;
-const
+const ContentPart_js_1 = require("./ContentPart.cjs");
 const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
 const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
 /**
@@ -68,7 +68,7 @@ function instruction() {
             if (prompt.system != null) {
                 text += `${prompt.system}\n`;
             }
-            text += (0,
+            text += (0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt);
             if (prompt.input != null) {
                 text += `\n\n### Input:\n${prompt.input}`;
             }

package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js
@@ -1,4 +1,4 @@
-import { validateContentIsString } from "./
+import { validateContentIsString } from "./ContentPart.js";
 const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
 const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
 /**

package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs
@@ -1,7 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat = exports.instruction = exports.text = void 0;
-const
+const ContentPart_js_1 = require("./ContentPart.cjs");
+const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
 const START_SEGMENT = "<|im_start|>";
 const END_SEGMENT = "<|im_end|>";
 function segmentStart(role) {
@@ -40,7 +41,7 @@ function instruction() {
     return {
         stopSequences: [END_SEGMENT],
         format(prompt) {
-            const instruction = (0,
+            const instruction = (0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt);
             return (segment("system", prompt.system) +
                 segment("user", instruction) +
                 segmentStart("assistant") +
@@ -69,14 +70,16 @@ function chat() {
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
-
-                        text += segment("user", textContent);
+                        text += segment("user", (0, ContentPart_js_1.validateContentIsString)(content, prompt));
                         break;
                     }
                     case "assistant": {
-                        text += segment("assistant", content);
+                        text += segment("assistant", (0, ContentPart_js_1.validateContentIsString)(content, prompt));
                         break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError_js_1.InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);

package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js
@@ -1,4 +1,5 @@
-import { validateContentIsString } from "./
+import { validateContentIsString } from "./ContentPart.js";
+import { InvalidPromptError } from "./InvalidPromptError.js";
 const START_SEGMENT = "<|im_start|>";
 const END_SEGMENT = "<|im_end|>";
 function segmentStart(role) {
@@ -64,14 +65,16 @@ export function chat() {
             for (const { role, content } of prompt.messages) {
                 switch (role) {
                     case "user": {
-
-                        text += segment("user", textContent);
+                        text += segment("user", validateContentIsString(content, prompt));
                         break;
                     }
                     case "assistant": {
-                        text += segment("assistant", content);
+                        text += segment("assistant", validateContentIsString(content, prompt));
                         break;
                     }
+                    case "tool": {
+                        throw new InvalidPromptError("Tool messages are not supported.", prompt);
+                    }
                     default: {
                         const _exhaustiveCheck = role;
                         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);

package/model-function/generate-text/prompt-template/ChatPrompt.cjs
@@ -1,2 +1,44 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.ChatMessage = void 0;
+exports.ChatMessage = {
+    user({ text }) {
+        return {
+            role: "user",
+            content: text,
+        };
+    },
+    tool({ toolResults, }) {
+        return {
+            role: "tool",
+            content: createToolContent({ toolResults }),
+        };
+    },
+    assistant({ text, toolResults, }) {
+        return {
+            role: "assistant",
+            content: createAssistantContent({ text, toolResults }),
+        };
+    },
+};
+function createToolContent({ toolResults, }) {
+    const toolContent = [];
+    for (const { result, toolCall } of toolResults ?? []) {
+        toolContent.push({
+            type: "tool-response",
+            id: toolCall.id,
+            response: result,
+        });
+    }
+    return toolContent;
+}
+function createAssistantContent({ text, toolResults, }) {
+    const content = [];
+    if (text != null) {
+        content.push({ type: "text", text });
+    }
+    for (const { toolCall } of toolResults ?? []) {
+        content.push({ type: "tool-call", ...toolCall });
+    }
+    return content;
+}

package/model-function/generate-text/prompt-template/ChatPrompt.d.ts
@@ -1,10 +1,14 @@
-import {
+import { ToolCallResult } from "../../../tool/ToolCallResult.js";
+import { ImagePart, TextPart, ToolCallPart, ToolResponsePart } from "./ContentPart.js";
 /**
- * A chat prompt is a combination of a system message and a list
+ * A chat prompt is a combination of a system message and a list
+ * of user, assistant, and tool messages.
  *
  * The user messages can contain multi-modal content.
+ * The assistant messages can contain tool calls.
  *
- * Note: Not all models and prompt formats support multi-modal inputs.
+ * Note: Not all models and prompt formats support multi-modal inputs and tool calls.
+ * The validation happens at runtime.
  *
  * @example
  * ```ts
@@ -22,6 +26,9 @@ export interface ChatPrompt {
     system?: string;
     messages: Array<ChatMessage>;
 }
+export type UserContent = string | Array<TextPart | ImagePart>;
+export type AssistantContent = string | Array<TextPart | ToolCallPart>;
+export type ToolContent = Array<ToolResponsePart>;
 /**
  * A message in a chat prompt.
  *
@@ -29,8 +36,23 @@ export interface ChatPrompt {
  */
 export type ChatMessage = {
     role: "user";
-    content:
+    content: UserContent;
 } | {
     role: "assistant";
-    content:
+    content: AssistantContent;
+} | {
+    role: "tool";
+    content: ToolContent;
+};
+export declare const ChatMessage: {
+    user({ text }: {
+        text: string;
+    }): ChatMessage;
+    tool({ toolResults, }: {
+        toolResults: ToolCallResult<string, unknown, unknown>[] | null;
+    }): ChatMessage;
+    assistant({ text, toolResults, }: {
+        text: string | null;
+        toolResults: ToolCallResult<string, unknown, unknown>[] | null;
+    }): ChatMessage;
 };

package/model-function/generate-text/prompt-template/ChatPrompt.js
@@ -1 +1,41 @@
-export {
+export const ChatMessage = {
+    user({ text }) {
+        return {
+            role: "user",
+            content: text,
+        };
+    },
+    tool({ toolResults, }) {
+        return {
+            role: "tool",
+            content: createToolContent({ toolResults }),
+        };
+    },
+    assistant({ text, toolResults, }) {
+        return {
+            role: "assistant",
+            content: createAssistantContent({ text, toolResults }),
+        };
+    },
+};
+function createToolContent({ toolResults, }) {
+    const toolContent = [];
+    for (const { result, toolCall } of toolResults ?? []) {
+        toolContent.push({
+            type: "tool-response",
+            id: toolCall.id,
+            response: result,
+        });
+    }
+    return toolContent;
+}
+function createAssistantContent({ text, toolResults, }) {
+    const content = [];
+    if (text != null) {
+        content.push({ type: "text", text });
+    }
+    for (const { toolCall } of toolResults ?? []) {
+        content.push({ type: "tool-call", ...toolCall });
+    }
+    return content;
+}
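
The new `ChatMessage` factories build the user, assistant, and tool messages of a `ChatPrompt`. A short sketch based only on the shapes defined in the hunks above; the top-level `modelfusion` exports and the pared-down `toolResults` objects (only `toolCall` and `result` are read by the factories) are assumptions:

```ts
import { ChatMessage, type ChatPrompt } from "modelfusion";

// Only the fields the factories read (toolCall, result) are shown here;
// the real ToolCallResult type carries additional fields, hence the cast.
const toolResults = [
  {
    toolCall: { id: "call-1", name: "calculator", args: { a: 2, b: 3 } },
    result: { sum: 5 },
  },
] as any;

const prompt: ChatPrompt = {
  system: "You are a helpful assistant.",
  messages: [
    ChatMessage.user({ text: "What is 2 + 3?" }),
    ChatMessage.assistant({ text: null, toolResults }),
    ChatMessage.tool({ toolResults }),
  ],
};
```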

package/model-function/generate-text/prompt-template/ContentPart.cjs
@@ -4,7 +4,7 @@ exports.validateContentIsString = void 0;
 const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
 function validateContentIsString(content, prompt) {
     if (typeof content !== "string") {
-        throw new InvalidPromptError_js_1.InvalidPromptError("
+        throw new InvalidPromptError_js_1.InvalidPromptError("Only text prompts are are supported by this prompt template.", prompt);
     }
     return content;
 }
|