modelfusion 0.105.0 → 0.107.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -0
- package/README.md +16 -59
- package/core/DefaultRun.cjs +0 -4
- package/core/DefaultRun.d.ts +0 -2
- package/core/DefaultRun.js +0 -4
- package/core/ExtensionFunctionEvent.d.ts +11 -0
- package/core/FunctionEvent.d.ts +2 -2
- package/extension/index.cjs +22 -3
- package/extension/index.d.ts +5 -1
- package/extension/index.js +4 -1
- package/index.cjs +0 -3
- package/index.d.ts +0 -3
- package/index.js +0 -3
- package/model-function/generate-structure/jsonStructurePrompt.cjs +42 -6
- package/model-function/generate-structure/jsonStructurePrompt.d.ts +12 -1
- package/model-function/generate-structure/jsonStructurePrompt.js +42 -5
- package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextGenerationModel.js +1 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +10 -8
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +1 -1
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +10 -8
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs +150 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.d.ts +62 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js +143 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.cjs +60 -0
- package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.js +58 -0
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +11 -0
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +11 -0
- package/model-function/generate-text/prompt-template/index.cjs +2 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -0
- package/model-function/generate-text/prompt-template/index.js +1 -0
- package/model-function/index.cjs +0 -1
- package/model-function/index.d.ts +0 -1
- package/model-function/index.js +0 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +6 -6
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +3 -3
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.cjs → LlamaCppCompletionModel.cjs} +8 -8
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.d.ts → LlamaCppCompletionModel.d.ts} +49 -49
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.js → LlamaCppCompletionModel.js} +6 -6
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.cjs → LlamaCppCompletionModel.test.cjs} +3 -3
- package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.js → LlamaCppCompletionModel.test.js} +3 -3
- package/model-provider/llamacpp/LlamaCppFacade.cjs +2 -2
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -2
- package/model-provider/llamacpp/LlamaCppFacade.js +2 -2
- package/model-provider/llamacpp/index.cjs +1 -1
- package/model-provider/llamacpp/index.d.ts +1 -1
- package/model-provider/llamacpp/index.js +1 -1
- package/model-provider/mistral/MistralChatModel.cjs +4 -4
- package/model-provider/mistral/MistralChatModel.d.ts +6 -6
- package/model-provider/mistral/MistralChatModel.js +1 -1
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/mistral/index.cjs +3 -3
- package/model-provider/mistral/index.d.ts +2 -2
- package/model-provider/mistral/index.js +2 -2
- package/model-provider/ollama/OllamaChatModel.d.ts +9 -8
- package/model-provider/ollama/OllamaChatModel.js +1 -1
- package/model-provider/ollama/OllamaCompletionModel.d.ts +2 -1
- package/model-provider/ollama/OllamaCompletionModel.js +1 -1
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +1 -7
- package/model-provider/ollama/OllamaCompletionModel.test.js +1 -7
- package/model-provider/openai/AbstractOpenAIChatModel.d.ts +8 -8
- package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
- package/model-provider/openai/OpenAICompletionModel.d.ts +6 -6
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +11 -11
- package/model-provider/openai/index.cjs +0 -1
- package/model-provider/openai/index.d.ts +0 -1
- package/model-provider/openai/index.js +0 -1
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +5 -5
- package/package.json +9 -20
- package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
- package/tool/generate-tool-call/TextGenerationToolCallModel.js +1 -1
- package/tool/generate-tool-call/index.cjs +1 -0
- package/tool/generate-tool-call/index.d.ts +1 -0
- package/tool/generate-tool-call/index.js +1 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.cjs +30 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.d.ts +5 -0
- package/tool/generate-tool-call/jsonToolCallPrompt.js +27 -0
- package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +1 -11
- package/tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.d.ts +12 -0
- package/tool/generate-tool-calls-or-text/index.cjs +1 -0
- package/tool/generate-tool-calls-or-text/index.d.ts +1 -0
- package/tool/generate-tool-calls-or-text/index.js +1 -0
- package/util/index.cjs +0 -1
- package/util/index.d.ts +0 -1
- package/util/index.js +0 -1
- package/browser/MediaSourceAppender.cjs +0 -54
- package/browser/MediaSourceAppender.d.ts +0 -11
- package/browser/MediaSourceAppender.js +0 -50
- package/browser/convertAudioChunksToBase64.cjs +0 -8
- package/browser/convertAudioChunksToBase64.d.ts +0 -4
- package/browser/convertAudioChunksToBase64.js +0 -4
- package/browser/convertBlobToBase64.cjs +0 -23
- package/browser/convertBlobToBase64.d.ts +0 -1
- package/browser/convertBlobToBase64.js +0 -19
- package/browser/index.cjs +0 -22
- package/browser/index.d.ts +0 -6
- package/browser/index.js +0 -6
- package/browser/invokeFlow.cjs +0 -23
- package/browser/invokeFlow.d.ts +0 -8
- package/browser/invokeFlow.js +0 -19
- package/browser/readEventSource.cjs +0 -29
- package/browser/readEventSource.d.ts +0 -9
- package/browser/readEventSource.js +0 -25
- package/browser/readEventSourceStream.cjs +0 -35
- package/browser/readEventSourceStream.d.ts +0 -7
- package/browser/readEventSourceStream.js +0 -31
- package/composed-function/index.cjs +0 -19
- package/composed-function/index.d.ts +0 -3
- package/composed-function/index.js +0 -3
- package/composed-function/summarize/SummarizationFunction.d.ts +0 -4
- package/composed-function/summarize/summarizeRecursively.cjs +0 -19
- package/composed-function/summarize/summarizeRecursively.d.ts +0 -11
- package/composed-function/summarize/summarizeRecursively.js +0 -15
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.cjs +0 -25
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +0 -24
- package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.js +0 -21
- package/cost/Cost.cjs +0 -38
- package/cost/Cost.d.ts +0 -16
- package/cost/Cost.js +0 -34
- package/cost/CostCalculator.d.ts +0 -8
- package/cost/calculateCost.cjs +0 -28
- package/cost/calculateCost.d.ts +0 -7
- package/cost/calculateCost.js +0 -24
- package/cost/index.cjs +0 -19
- package/cost/index.d.ts +0 -3
- package/cost/index.js +0 -3
- package/guard/GuardEvent.cjs +0 -2
- package/guard/GuardEvent.d.ts +0 -7
- package/guard/fixStructure.cjs +0 -75
- package/guard/fixStructure.d.ts +0 -64
- package/guard/fixStructure.js +0 -71
- package/guard/guard.cjs +0 -79
- package/guard/guard.d.ts +0 -29
- package/guard/guard.js +0 -75
- package/guard/index.cjs +0 -19
- package/guard/index.d.ts +0 -3
- package/guard/index.js +0 -3
- package/model-function/SuccessfulModelCall.cjs +0 -10
- package/model-function/SuccessfulModelCall.d.ts +0 -12
- package/model-function/SuccessfulModelCall.js +0 -6
- package/model-provider/openai/OpenAICostCalculator.cjs +0 -89
- package/model-provider/openai/OpenAICostCalculator.d.ts +0 -6
- package/model-provider/openai/OpenAICostCalculator.js +0 -85
- package/server/fastify/AssetStorage.cjs +0 -2
- package/server/fastify/AssetStorage.d.ts +0 -17
- package/server/fastify/AssetStorage.js +0 -1
- package/server/fastify/DefaultFlow.cjs +0 -22
- package/server/fastify/DefaultFlow.d.ts +0 -16
- package/server/fastify/DefaultFlow.js +0 -18
- package/server/fastify/FileSystemAssetStorage.cjs +0 -60
- package/server/fastify/FileSystemAssetStorage.d.ts +0 -19
- package/server/fastify/FileSystemAssetStorage.js +0 -56
- package/server/fastify/FileSystemLogger.cjs +0 -49
- package/server/fastify/FileSystemLogger.d.ts +0 -18
- package/server/fastify/FileSystemLogger.js +0 -45
- package/server/fastify/Flow.cjs +0 -2
- package/server/fastify/Flow.d.ts +0 -9
- package/server/fastify/Flow.js +0 -1
- package/server/fastify/FlowRun.cjs +0 -71
- package/server/fastify/FlowRun.d.ts +0 -28
- package/server/fastify/FlowRun.js +0 -67
- package/server/fastify/FlowSchema.cjs +0 -2
- package/server/fastify/FlowSchema.d.ts +0 -5
- package/server/fastify/FlowSchema.js +0 -1
- package/server/fastify/Logger.cjs +0 -2
- package/server/fastify/Logger.d.ts +0 -13
- package/server/fastify/Logger.js +0 -1
- package/server/fastify/PathProvider.cjs +0 -34
- package/server/fastify/PathProvider.d.ts +0 -12
- package/server/fastify/PathProvider.js +0 -30
- package/server/fastify/index.cjs +0 -24
- package/server/fastify/index.d.ts +0 -8
- package/server/fastify/index.js +0 -8
- package/server/fastify/modelFusionFlowPlugin.cjs +0 -103
- package/server/fastify/modelFusionFlowPlugin.d.ts +0 -12
- package/server/fastify/modelFusionFlowPlugin.js +0 -99
- package/util/getAudioFileExtension.cjs +0 -29
- package/util/getAudioFileExtension.d.ts +0 -1
- package/util/getAudioFileExtension.js +0 -25
- /package/{composed-function/summarize/SummarizationFunction.cjs → core/ExtensionFunctionEvent.cjs} +0 -0
- /package/{composed-function/summarize/SummarizationFunction.js → core/ExtensionFunctionEvent.js} +0 -0
- /package/{cost/CostCalculator.js → model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.d.ts} +0 -0
- /package/{guard/GuardEvent.js → model-provider/llamacpp/LlamaCppCompletionModel.test.d.ts} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.cjs → MistralChatPromptTemplate.cjs} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.d.ts → MistralChatPromptTemplate.d.ts} +0 -0
- /package/model-provider/mistral/{MistralPromptTemplate.js → MistralChatPromptTemplate.js} +0 -0
- /package/{cost/CostCalculator.cjs → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.cjs} +0 -0
- /package/{model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts → tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js} +0 -0
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,31 @@
 # Changelog
 
+## v0.107.0 - 2023-12-29
+
+### Added
+
+- Mistral instruct prompt template
+
+### Changed
+
+- **breaking change**: Renamed `LlamaCppTextGenerationModel` to `LlamaCppCompletionModel`.
+
+### Fixed
+
+- Updated `LlamaCppCompletionModel` to the latest llama.cpp version.
+- Fixed formatting of system prompt for chats in Llama2 2 prompt template.
+
+## v0.106.0 - 2023-12-28
+
+Experimental features that are unlikely to become stable before v1.0 have been moved to a separate `modelfusion-experimental` package.
+
+### Removed
+
+- Cost calculation
+- `guard` function
+- Browser and server features (incl. flow)
+- `summarizeRecursively` function
+
 ## v0.105.0 - 2023-12-26
 
 ### Added
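Migration note for the breaking rename: the sketch below shows a 0.107-style call site. It assumes the llama.cpp facade exposes the renamed model as `llamacpp.CompletionTextGenerator`; this diff only shows the renamed `LlamaCppCompletionModel` files, so the facade name is an assumption.

```ts
import { generateText, llamacpp } from "modelfusion";

// 0.106 and earlier exposed this model as LlamaCppTextGenerationModel; from 0.107
// it is LlamaCppCompletionModel. The facade function name below is an assumption.
const text = await generateText(
  llamacpp.CompletionTextGenerator({ temperature: 0.7 }),
  "Write a haiku about renaming classes:\n\n"
);
```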
package/README.md
CHANGED
@@ -12,19 +12,17 @@
 
 ## Introduction
 
-**ModelFusion** is
+**ModelFusion** is an abstraction layer for integrating AI models into JavaScript and TypeScript applications, unifying the API for common operations such as text streaming, structure generation, and tool usage. It provides features to support production environments, including observability hooks, logging, and automatic retries. You can use ModelFusion to build AI applications, chatbots, and agents.
 
 - **Vendor-neutral**: ModelFusion is a non-commercial open source project that is community-driven. You can use it with any supported provider.
 - **Multi-modal**: ModelFusion supports a wide range of models including text generation, image generation, vision, text-to-speech, speech-to-text, and embedding models.
-- **
-- **Utility functions**: ModelFusion provides functionality for tools and tool usage, vector indices, and guards functions.
-- **Type inference and validation**: ModelFusion infers TypeScript types wherever possible and to validates model responses.
+- **Type inference and validation**: ModelFusion infers TypeScript types wherever possible and validates model responses.
 - **Observability and logging**: ModelFusion provides an observer framework and out-of-the-box logging support.
-- **Resilience and
-- **
+- **Resilience and robustness**: ModelFusion ensures seamless operation through automatic retries, throttling, and error handling mechanisms.
+- **Built for production**: ModelFusion is fully tree-shakeable, can be used in serverless environments, and only uses a minimal set of dependencies.
 
 > [!NOTE]
-> ModelFusion is
+> ModelFusion is getting closer to a stable v1, which is expected in Q1/2024. The main API is now mostly stable, but until version 1.0 there may be breaking changes. Feedback and suggestions are welcome.
 
 ## Quick Install
 
@@ -47,8 +45,8 @@ You can provide API keys for the different [integrations](https://modelfusion.de
 
 ### [Generate Text](https://modelfusion.dev/guide/function/generate-text)
 
-Generate text using a language model and a prompt. You can stream the text if it is supported by the model. You can use images for multi-modal prompting if the model supports it (e.g. with [llama.cpp](https://modelfusion.dev/
-You can use [prompt templates](https://modelfusion.dev/guide/function/generate-text#prompt-
+Generate text using a language model and a prompt. You can stream the text if it is supported by the model. You can use images for multi-modal prompting if the model supports it (e.g. with [llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp)).
+You can use [prompt templates](https://modelfusion.dev/guide/function/generate-text#prompt-template) to change the prompt template of a model.
 
 #### generateText
 
@@ -61,7 +59,7 @@ const text = await generateText(
 );
 ```
 
-Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [OpenAI compatible](https://modelfusion.dev/integration/model-provider/openaicompatible), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Ollama](https://modelfusion.dev/integration/model-provider/ollama), [
+Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [OpenAI compatible](https://modelfusion.dev/integration/model-provider/openaicompatible), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Ollama](https://modelfusion.dev/integration/model-provider/ollama), [Mistral](https://modelfusion.dev/integration/model-provider/mistral), [Hugging Face](https://modelfusion.dev/integration/model-provider/huggingface), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Anthropic](https://modelfusion.dev/integration/model-provider/anthropic)
 
 #### streamText
 
@@ -326,53 +324,11 @@ const reconstructedText = await tokenizer.detokenize(tokens);
 
 Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Cohere](https://modelfusion.dev/integration/model-provider/cohere)
 
-### [Guards](https://modelfusion.dev/guide/guard)
-
-Guard functions can be used to implement retry on error, redacting and changing reponses, etc.
-
-#### Retry structure parsing on error
-
-```ts
-const result = await guard(
-  (input, options) =>
-    generateStructure(
-      openai
-        .ChatTextGenerator({
-          // ...
-        })
-        .asFunctionCallStructureGenerationModel({
-          fnName: "myFunction",
-        }),
-      zodSchema({
-        // ...
-      }),
-      input,
-      options
-    ),
-  [
-    // ...
-  ],
-  fixStructure({
-    modifyInputForRetry: async ({ input, error }) => [
-      ...input,
-      openai.ChatMessage.assistant(null, {
-        functionCall: {
-          name: "sentiment",
-          arguments: JSON.stringify(error.valueText),
-        },
-      }),
-      openai.ChatMessage.user(error.message),
-      openai.ChatMessage.user("Please fix the error and try again."),
-    ],
-  })
-);
-```
-
 ### [Tools](https://modelfusion.dev/guide/tools)
 
 Tools are functions that can be executed by an AI model. They are useful for building chatbots and agents.
 
-Predefined tools: [SerpAPI](https://modelfusion.dev/
+Predefined tools: [Math.js](https://modelfusion.dev/guide/tools/predefined-tools/mathjs), [SerpAPI](https://modelfusion.dev/guide/tools/predefined-tools/serpapi), [Google Custom Search](https://modelfusion.dev/guide/tools/predefined-tools/google-custom-search)
 
 #### [Creating Tools](https://modelfusion.dev/guide/tools/create-tools)
 
@@ -649,7 +605,7 @@ import { modelfusion } from "modelfusion";
 modelfusion.setLogFormat("detailed-object"); // log full events
 ```
 
-### [Server](https://modelfusion.dev/guide/server/)
+### [Server](https://modelfusion.dev/guide/experimental/server/)
 
 > [!WARNING]
 > ModelFusion Server is in its initial development phase and not feature-complete. The API is experimental and breaking changes are likely. Feedback and suggestions are welcome.
@@ -668,7 +624,7 @@ import {
   FileSystemAssetStorage,
   FileSystemLogger,
   modelFusionFastifyPlugin,
-} from "modelfusion/fastify-server"; // '/fastify-server' import path
+} from "modelfusion-experimental/fastify-server"; // '/fastify-server' import path
 
 // configurable logging for all runs using ModelFusion observability:
 const logger = new FileSystemLogger({
@@ -693,7 +649,7 @@ fastify.register(modelFusionFastifyPlugin, {
 Using `invokeFlow`, you can easily connect your client to a ModelFusion flow endpoint:
 
 ```ts
-import { invokeFlow } from "modelfusion/browser"; // '/browser' import path
+import { invokeFlow } from "modelfusion-experimental/browser"; // '/browser' import path
 
 invokeFlow({
   url: `${BASE_URL}/myFlow`,
@@ -727,14 +683,12 @@ invokeFlow({
 - [Generate structure or text](https://modelfusion.dev/guide/function/generate-structure-or-text)
 - [Tokenize Text](https://modelfusion.dev/guide/function/tokenize-text)
 - [Embed Value](https://modelfusion.dev/guide/function/embed)
-- [Guards](https://modelfusion.dev/guide/guard)
 - [Tools](https://modelfusion.dev/guide/tools)
 - [Vector Indices](https://modelfusion.dev/guide/vector-index)
 - [Upsert](https://modelfusion.dev/guide/vector-index/upsert)
 - [Retrieve](https://modelfusion.dev/guide/vector-index/retrieve)
 - [Text Chunks](https://modelfusion.dev/guide/text-chunk/)
 - [Split Text](https://modelfusion.dev/guide/text-chunk/split)
-- [Server](https://modelfusion.dev/guide/server/)
 - [Utilities](https://modelfusion.dev/guide/util/)
 - [API Configuration](https://modelfusion.dev/guide/util/api-configuration)
 - [Retry strategies](https://modelfusion.dev/guide/util/api-configuration/retry)
@@ -743,7 +697,10 @@ invokeFlow({
 - [Observers](https://modelfusion.dev/guide/util/observer)
 - [Runs](https://modelfusion.dev/guide/util/run)
 - [Abort signals](https://modelfusion.dev/guide/util/abort)
-
+- [Experimental](https://modelfusion.dev/guide/experimental/)
+- [Guards](https://modelfusion.dev/guide/experimental/guard)
+- [Server](https://modelfusion.dev/guide/experimental/server/)
+- [Cost calculation](https://modelfusion.dev/guide/experimental/cost-calculation)
 - [Troubleshooting](https://modelfusion.dev/guide/troubleshooting)
 - [Bundling](https://modelfusion.dev/guide/troubleshooting/bundling)
 
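The Mistral instruct template added in this release plugs into the prompt-template mechanism the README links to above. A hedged sketch follows; it assumes the template is exported as `MistralInstructPrompt` and attached with `withTextPromptTemplate()`, and neither name appears in this diff.

```ts
import { generateText, llamacpp, MistralInstructPrompt } from "modelfusion";

// assumed names: MistralInstructPrompt (export of the new prompt template)
// and llamacpp.CompletionTextGenerator (facade for the renamed completion model)
const text = await generateText(
  llamacpp
    .CompletionTextGenerator({ temperature: 0.7 })
    .withTextPromptTemplate(MistralInstructPrompt.instruction()),
  {
    system: "You are a concise assistant.",
    instruction: "Explain in one sentence what a prompt template does.",
  }
);
```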
package/core/DefaultRun.cjs
CHANGED
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.DefaultRun = void 0;
 const nanoid_1 = require("nanoid");
-const SuccessfulModelCall_js_1 = require("../model-function/SuccessfulModelCall.cjs");
 const FunctionEventSource_js_1 = require("./FunctionEventSource.cjs");
 class DefaultRun {
     constructor({ runId = `run-${(0, nanoid_1.nanoid)()}`, sessionId, userId, abortSignal, observers, errorHandler, } = {}) {
@@ -69,8 +68,5 @@ class DefaultRun {
             errorHandler: this.errorHandler.bind(this),
         });
     }
-    get successfulModelCalls() {
-        return (0, SuccessfulModelCall_js_1.extractSuccessfulModelCalls)(this.events);
-    }
 }
 exports.DefaultRun = DefaultRun;
package/core/DefaultRun.d.ts
CHANGED
@@ -1,4 +1,3 @@
-import { SuccessfulModelCall } from "../model-function/SuccessfulModelCall.js";
 import { ErrorHandler } from "../util/ErrorHandler.js";
 import { FunctionEvent } from "./FunctionEvent.js";
 import { FunctionObserver } from "./FunctionObserver.js";
@@ -22,5 +21,4 @@ export declare class DefaultRun implements Run {
     readonly functionObserver: {
         onFunctionEvent: (event: FunctionEvent) => void;
     };
-    get successfulModelCalls(): Array<SuccessfulModelCall>;
 }
package/core/DefaultRun.js
CHANGED
@@ -1,5 +1,4 @@
 import { nanoid as createId } from "nanoid";
-import { extractSuccessfulModelCalls, } from "../model-function/SuccessfulModelCall.js";
 import { FunctionEventSource } from "./FunctionEventSource.js";
 export class DefaultRun {
     constructor({ runId = `run-${createId()}`, sessionId, userId, abortSignal, observers, errorHandler, } = {}) {
@@ -66,7 +65,4 @@ export class DefaultRun {
             errorHandler: this.errorHandler.bind(this),
         });
     }
-    get successfulModelCalls() {
-        return extractSuccessfulModelCalls(this.events);
-    }
 }
package/core/ExtensionFunctionEvent.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { BaseFunctionFinishedEvent, BaseFunctionStartedEvent } from "./FunctionEvent.js";
+export interface ExtensionFunctionStartedEvent extends BaseFunctionStartedEvent {
+    functionType: "extension";
+    extension: string;
+    data: unknown;
+}
+export interface ExtensionFunctionFinishedEvent extends BaseFunctionFinishedEvent {
+    functionType: "extension";
+    extension: string;
+    data: unknown;
+}
package/core/FunctionEvent.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { ExtensionFunctionStartedEvent, ExtensionFunctionFinishedEvent } from "./ExtensionFunctionEvent.js";
 import { ModelCallFinishedEvent, ModelCallStartedEvent } from "../model-function/ModelCallEvent.js";
 import { RetrieveFinishedEvent, RetrieveStartedEvent } from "../retriever/RetrieveEvent.js";
 import { ExecuteToolFinishedEvent, ExecuteToolStartedEvent } from "../tool/execute-tool/ExecuteToolEvent.js";
@@ -82,4 +82,4 @@ export interface BaseFunctionFinishedEvent extends BaseFunctionEvent {
      */
     result: BaseFunctionFinishedEventResult;
 }
-export type FunctionEvent = ExecuteFunctionStartedEvent | ExecuteFunctionFinishedEvent | ExecuteToolStartedEvent | ExecuteToolFinishedEvent |
+export type FunctionEvent = ExecuteFunctionStartedEvent | ExecuteFunctionFinishedEvent | ExecuteToolStartedEvent | ExecuteToolFinishedEvent | ExtensionFunctionStartedEvent | ExtensionFunctionFinishedEvent | ModelCallStartedEvent | ModelCallFinishedEvent | RetrieveStartedEvent | RetrieveFinishedEvent | UpsertIntoVectorIndexStartedEvent | UpsertIntoVectorIndexFinishedEvent | UseToolStartedEvent | UseToolFinishedEvent | UseToolsOrGenerateTextStartedEvent | UseToolsOrGenerateTextFinishedEvent;
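Because the `FunctionEvent` union above now includes the extension events, they reach any registered `FunctionObserver`. A minimal sketch of picking them out by their `functionType` discriminator:

```ts
import { FunctionEvent, FunctionObserver } from "modelfusion";

const extensionEventLogger: FunctionObserver = {
  onFunctionEvent(event: FunctionEvent) {
    // narrows the union to ExtensionFunctionStartedEvent | ExtensionFunctionFinishedEvent
    if (event.functionType === "extension") {
      console.log(`extension "${event.extension}"`, event.data);
    }
  },
};
```

Such an observer can be registered, for example, through the `observers` option of `DefaultRun` shown earlier.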
package/extension/index.cjs
CHANGED
@@ -1,15 +1,34 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.AbstractModel = exports.
+exports.parseEventSourceStream = exports.AbstractModel = exports.executeFunctionCall = exports.postToApi = exports.postJsonToApi = exports.createTextResponseHandler = exports.createJsonResponseHandler = exports.createAudioMpegResponseHandler = exports.loadApiKey = exports.callWithRetryAndThrottle = void 0;
+__exportStar(require("../core/ExtensionFunctionEvent.cjs"), exports);
 var callWithRetryAndThrottle_js_1 = require("../core/api/callWithRetryAndThrottle.cjs");
 Object.defineProperty(exports, "callWithRetryAndThrottle", { enumerable: true, get: function () { return callWithRetryAndThrottle_js_1.callWithRetryAndThrottle; } });
 var loadApiKey_js_1 = require("../core/api/loadApiKey.cjs");
 Object.defineProperty(exports, "loadApiKey", { enumerable: true, get: function () { return loadApiKey_js_1.loadApiKey; } });
 var postToApi_js_1 = require("../core/api/postToApi.cjs");
-Object.defineProperty(exports, "postJsonToApi", { enumerable: true, get: function () { return postToApi_js_1.postJsonToApi; } });
-Object.defineProperty(exports, "postToApi", { enumerable: true, get: function () { return postToApi_js_1.postToApi; } });
 Object.defineProperty(exports, "createAudioMpegResponseHandler", { enumerable: true, get: function () { return postToApi_js_1.createAudioMpegResponseHandler; } });
 Object.defineProperty(exports, "createJsonResponseHandler", { enumerable: true, get: function () { return postToApi_js_1.createJsonResponseHandler; } });
 Object.defineProperty(exports, "createTextResponseHandler", { enumerable: true, get: function () { return postToApi_js_1.createTextResponseHandler; } });
+Object.defineProperty(exports, "postJsonToApi", { enumerable: true, get: function () { return postToApi_js_1.postJsonToApi; } });
+Object.defineProperty(exports, "postToApi", { enumerable: true, get: function () { return postToApi_js_1.postToApi; } });
+var executeFunctionCall_js_1 = require("../core/executeFunctionCall.cjs");
+Object.defineProperty(exports, "executeFunctionCall", { enumerable: true, get: function () { return executeFunctionCall_js_1.executeFunctionCall; } });
 var AbstractModel_js_1 = require("../model-function/AbstractModel.cjs");
 Object.defineProperty(exports, "AbstractModel", { enumerable: true, get: function () { return AbstractModel_js_1.AbstractModel; } });
+var parseEventSourceStream_js_1 = require("../util/streaming/parseEventSourceStream.cjs");
+Object.defineProperty(exports, "parseEventSourceStream", { enumerable: true, get: function () { return parseEventSourceStream_js_1.parseEventSourceStream; } });
package/extension/index.d.ts
CHANGED
@@ -1,4 +1,8 @@
+export * from "../core/ExtensionFunctionEvent.js";
 export { callWithRetryAndThrottle } from "../core/api/callWithRetryAndThrottle.js";
 export { loadApiKey } from "../core/api/loadApiKey.js";
-export {
+export { ResponseHandler, createAudioMpegResponseHandler, createJsonResponseHandler, createTextResponseHandler, postJsonToApi, postToApi, } from "../core/api/postToApi.js";
+export { executeFunctionCall } from "../core/executeFunctionCall.js";
 export { AbstractModel } from "../model-function/AbstractModel.js";
+export { ErrorHandler } from "../util/ErrorHandler.js";
+export { parseEventSourceStream } from "../util/streaming/parseEventSourceStream.js";
package/extension/index.js
CHANGED
@@ -1,4 +1,7 @@
+export * from "../core/ExtensionFunctionEvent.js";
 export { callWithRetryAndThrottle } from "../core/api/callWithRetryAndThrottle.js";
 export { loadApiKey } from "../core/api/loadApiKey.js";
-export {
+export { createAudioMpegResponseHandler, createJsonResponseHandler, createTextResponseHandler, postJsonToApi, postToApi, } from "../core/api/postToApi.js";
+export { executeFunctionCall } from "../core/executeFunctionCall.js";
 export { AbstractModel } from "../model-function/AbstractModel.js";
+export { parseEventSourceStream } from "../util/streaming/parseEventSourceStream.js";
package/index.cjs
CHANGED
@@ -14,10 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-__exportStar(require("./composed-function/index.cjs"), exports);
 __exportStar(require("./core/index.cjs"), exports);
-__exportStar(require("./cost/index.cjs"), exports);
-__exportStar(require("./guard/index.cjs"), exports);
 __exportStar(require("./model-function/index.cjs"), exports);
 __exportStar(require("./model-provider/index.cjs"), exports);
 __exportStar(require("./observability/index.cjs"), exports);
package/index.d.ts
CHANGED
@@ -1,7 +1,4 @@
-export * from "./composed-function/index.js";
 export * from "./core/index.js";
-export * from "./cost/index.js";
-export * from "./guard/index.js";
 export * from "./model-function/index.js";
 export * from "./model-provider/index.js";
 export * from "./observability/index.js";
package/index.js
CHANGED
@@ -1,7 +1,4 @@
-export * from "./composed-function/index.js";
 export * from "./core/index.js";
-export * from "./cost/index.js";
-export * from "./guard/index.js";
 export * from "./model-function/index.js";
 export * from "./model-provider/index.js";
 export * from "./observability/index.js";
package/model-function/generate-structure/jsonStructurePrompt.cjs
CHANGED
@@ -2,10 +2,46 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.jsonStructurePrompt = void 0;
 const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
-
-
-
-
-
+const DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+const DEFAULT_SCHEMA_SUFFIX = "\nYou MUST answer with a JSON object matches the above schema.";
+exports.jsonStructurePrompt = {
+    custom(createPrompt) {
+        return { createPrompt, extractStructure };
+    },
+    text({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({ schema, schemaPrefix, schemaSuffix }),
+                instruction: prompt,
+            }),
+            extractStructure,
+        };
+    },
+    instruction({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({
+                    originalSystemPrompt: prompt.system,
+                    schema,
+                    schemaPrefix,
+                    schemaSuffix,
+                }),
+                instruction: prompt.instruction,
+            }),
+            extractStructure,
+        };
+    },
+};
+function createSystemPrompt({ originalSystemPrompt, schema, schemaPrefix = DEFAULT_SCHEMA_PREFIX, schemaSuffix = DEFAULT_SCHEMA_SUFFIX, }) {
+    return [
+        originalSystemPrompt,
+        schemaPrefix,
+        JSON.stringify(schema.getJsonSchema()),
+        schemaSuffix,
+    ]
+        .filter(Boolean)
+        .join("\n");
+}
+function extractStructure(response) {
+    return (0, parseJSON_js_1.parseJSON)({ text: response });
 }
-exports.jsonStructurePrompt = jsonStructurePrompt;
package/model-function/generate-structure/jsonStructurePrompt.d.ts
CHANGED
@@ -1,4 +1,15 @@
 import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
 import { Schema } from "../../core/schema/Schema.js";
+import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
 import { StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
-export declare
+export declare const jsonStructurePrompt: {
+    custom<SOURCE_PROMPT, TARGET_PROMPT>(createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT): StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
+    text({ schemaPrefix, schemaSuffix, }?: {
+        schemaPrefix?: string | undefined;
+        schemaSuffix?: string | undefined;
+    }): StructureFromTextPromptTemplate<string, InstructionPrompt>;
+    instruction({ schemaPrefix, schemaSuffix, }?: {
+        schemaPrefix?: string | undefined;
+        schemaSuffix?: string | undefined;
+    }): StructureFromTextPromptTemplate<InstructionPrompt, InstructionPrompt>;
+};
package/model-function/generate-structure/jsonStructurePrompt.js
CHANGED
@@ -1,7 +1,44 @@
 import { parseJSON } from "../../core/schema/parseJSON.js";
-
-
-
-
-
+const DEFAULT_SCHEMA_PREFIX = "JSON schema:";
+const DEFAULT_SCHEMA_SUFFIX = "\nYou MUST answer with a JSON object matches the above schema.";
+export const jsonStructurePrompt = {
+    custom(createPrompt) {
+        return { createPrompt, extractStructure };
+    },
+    text({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({ schema, schemaPrefix, schemaSuffix }),
+                instruction: prompt,
+            }),
+            extractStructure,
+        };
+    },
+    instruction({ schemaPrefix, schemaSuffix, } = {}) {
+        return {
+            createPrompt: (prompt, schema) => ({
+                system: createSystemPrompt({
+                    originalSystemPrompt: prompt.system,
+                    schema,
+                    schemaPrefix,
+                    schemaSuffix,
+                }),
+                instruction: prompt.instruction,
+            }),
+            extractStructure,
+        };
+    },
+};
+function createSystemPrompt({ originalSystemPrompt, schema, schemaPrefix = DEFAULT_SCHEMA_PREFIX, schemaSuffix = DEFAULT_SCHEMA_SUFFIX, }) {
+    return [
+        originalSystemPrompt,
+        schemaPrefix,
+        JSON.stringify(schema.getJsonSchema()),
+        schemaSuffix,
+    ]
+        .filter(Boolean)
+        .join("\n");
+}
+function extractStructure(response) {
+    return parseJSON({ text: response });
 }
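To make the new template concrete, here is a standalone sketch of what `jsonStructurePrompt.text()` produces; it only exercises the code shown above plus `zodSchema` (which already appears in the removed README guard example) and makes no model call:

```ts
import { jsonStructurePrompt, zodSchema } from "modelfusion";
import { z } from "zod";

const template = jsonStructurePrompt.text();
const schema = zodSchema(z.object({ name: z.string(), age: z.number() }));

// createPrompt wraps a plain string prompt into a { system, instruction } prompt.
// The system text is schema prefix + JSON schema + schema suffix, joined by newlines.
const prompt = template.createPrompt("Generate a character.", schema);
// prompt.instruction === "Generate a character."
// prompt.system starts with "JSON schema:" and ends with
// "You MUST answer with a JSON object matches the above schema."

// extractStructure parses the raw model response text as JSON.
const value = template.extractStructure('{ "name": "Ada", "age": 36 }');
```

In typical use the template is handed to a text generation model (e.g. via `asStructureGenerationModel(...)`), and `generateStructure` then calls `createPrompt` before the request and `extractStructure` on the response.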
package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts
CHANGED
@@ -1,6 +1,7 @@
 import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/ToolCallsOrGenerateTextPromptTemplate.js";
 import { StructureFromTextGenerationModel } from "../generate-structure/StructureFromTextGenerationModel.js";
 import { StructureFromTextPromptTemplate } from "../generate-structure/StructureFromTextPromptTemplate.js";
 import { TextGenerationModel, TextGenerationModelSettings } from "./TextGenerationModel.js";
package/model-function/generate-text/PromptTemplateTextGenerationModel.js
CHANGED
@@ -1,5 +1,5 @@
 import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel
+import { TextGenerationToolCallsOrGenerateTextModel } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 import { StructureFromTextGenerationModel } from "../generate-structure/StructureFromTextGenerationModel.js";
 export class PromptTemplateTextGenerationModel {
     constructor({ model, promptTemplate, }) {
package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs
CHANGED
@@ -46,4 +46,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = (0, ChatMLPromptTemplate_js_1.chat)().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js
CHANGED
@@ -44,4 +44,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = chat().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });
package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs
CHANGED
@@ -15,7 +15,7 @@ const END_SYSTEM = "\n<</SYS>>\n\n";
  *
  * Llama 2 prompt template:
  * ```
- * <s>[INST]{ instruction } [/INST]
+ * <s>[INST] { instruction } [/INST]
  * ```
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
@@ -24,7 +24,7 @@ function text() {
     return {
         stopSequences: [END_SEGMENT],
         format(prompt) {
-            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}
+            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}`;
         },
     };
 }
@@ -72,12 +72,14 @@ function chat() {
     return {
         format(prompt) {
             validateLlama2Prompt(prompt);
-
-
-
-
-            : ""
-
+            // get content of the first message (validated to be a user message)
+            const content = prompt.messages[0].content;
+            let text = `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
+                ? `${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
+                : ""}${content}${END_INSTRUCTION}`;
+            // process remaining messages
+            for (let i = 1; i < prompt.messages.length; i++) {
+                const { role, content } = prompt.messages[i];
                 switch (role) {
                     case "user": {
                         const textContent = (0, ContentPart_js_1.validateContentIsString)(content, prompt);
package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js
CHANGED
@@ -12,7 +12,7 @@ const END_SYSTEM = "\n<</SYS>>\n\n";
  *
  * Llama 2 prompt template:
  * ```
- * <s>[INST]{ instruction } [/INST]
+ * <s>[INST] { instruction } [/INST]
  * ```
  *
  * @see https://www.philschmid.de/llama-2#how-to-prompt-llama-2-chat
@@ -21,7 +21,7 @@ export function text() {
     return {
         stopSequences: [END_SEGMENT],
         format(prompt) {
-            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}
+            return `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt}${END_INSTRUCTION}`;
         },
     };
 }
@@ -67,12 +67,14 @@ export function chat() {
     return {
         format(prompt) {
             validateLlama2Prompt(prompt);
-
-
-
-
-            : ""
-
+            // get content of the first message (validated to be a user message)
+            const content = prompt.messages[0].content;
+            let text = `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${prompt.system != null
+                ? `${BEGIN_SYSTEM}${prompt.system}${END_SYSTEM}`
+                : ""}${content}${END_INSTRUCTION}`;
+            // process remaining messages
+            for (let i = 1; i < prompt.messages.length; i++) {
+                const { role, content } = prompt.messages[i];
                 switch (role) {
                     case "user": {
                         const textContent = validateContentIsString(content, prompt);
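For illustration, the fixed `chat()` template above folds the system prompt into the first `[INST]` block instead of emitting it as a separate segment. Below is a standalone sketch of the resulting string; the tag constants are assumptions based on the Llama 2 convention (only `END_SYSTEM` is visible in this diff), and the continuation loop for the remaining messages is simplified.

```ts
// Assumed tag values; in the library they are module constants next to END_SYSTEM.
const BEGIN_SEGMENT = "<s>";
const END_SEGMENT = " </s>";
const BEGIN_INSTRUCTION = "[INST] ";
const END_INSTRUCTION = " [/INST] ";
const BEGIN_SYSTEM = "<<SYS>>\n";
const END_SYSTEM = "\n<</SYS>>\n\n";

type Llama2ChatPrompt = {
  system?: string;
  messages: Array<{ role: "user" | "assistant"; content: string }>;
};

function formatLlama2Chat({ system, messages }: Llama2ChatPrompt): string {
  // system prompt is wrapped in <<SYS>> tags inside the first [INST] block
  let text = `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${
    system != null ? `${BEGIN_SYSTEM}${system}${END_SYSTEM}` : ""
  }${messages[0].content}${END_INSTRUCTION}`;

  // simplified continuation: each further user turn opens a new <s>[INST] block,
  // each assistant turn is appended and closed with </s>
  for (let i = 1; i < messages.length; i++) {
    const { role, content } = messages[i];
    text +=
      role === "user"
        ? `${BEGIN_SEGMENT}${BEGIN_INSTRUCTION}${content}${END_INSTRUCTION}`
        : `${content}${END_SEGMENT}`;
  }
  return text;
}

// formatLlama2Chat({ system: "you are a chatbot", messages: [{ role: "user", content: "Hi" }] })
// -> "<s>[INST] <<SYS>>\nyou are a chatbot\n<</SYS>>\n\nHi [/INST] "
```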
package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs
CHANGED
@@ -46,4 +46,15 @@ describe("chat prompt", () => {
         });
         expect(prompt).toMatchSnapshot();
     });
+    it("should format prompt with system message and user-assistant-user messages", () => {
+        const prompt = (0, Llama2PromptTemplate_js_1.chat)().format({
+            system: "you are a chatbot",
+            messages: [
+                { role: "user", content: "1st user message" },
+                { role: "assistant", content: "assistant message" },
+                { role: "user", content: "2nd user message" },
+            ],
+        });
+        expect(prompt).toMatchSnapshot();
+    });
 });