modelfusion 0.106.0 → 0.108.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/CHANGELOG.md +59 -0
  2. package/README.md +19 -59
  3. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +11 -0
  4. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +11 -0
  5. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +9 -7
  6. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +9 -7
  7. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +11 -0
  8. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +11 -0
  9. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs +150 -0
  10. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.d.ts +62 -0
  11. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js +143 -0
  12. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.cjs +60 -0
  13. package/model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.js +58 -0
  14. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.cjs +11 -0
  15. package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.test.js +11 -0
  16. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +11 -0
  17. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +11 -0
  18. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +11 -0
  19. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +11 -0
  20. package/model-function/generate-text/prompt-template/index.cjs +2 -1
  21. package/model-function/generate-text/prompt-template/index.d.ts +1 -0
  22. package/model-function/generate-text/prompt-template/index.js +1 -0
  23. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +3 -3
  24. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.cjs → LlamaCppCompletionModel.cjs} +25 -11
  25. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.d.ts → LlamaCppCompletionModel.d.ts} +125 -38
  26. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.js → LlamaCppCompletionModel.js} +23 -9
  27. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.cjs → LlamaCppCompletionModel.test.cjs} +3 -3
  28. package/model-provider/llamacpp/LlamaCppCompletionModel.test.d.ts +1 -0
  29. package/model-provider/llamacpp/{LlamaCppTextGenerationModel.test.js → LlamaCppCompletionModel.test.js} +3 -3
  30. package/model-provider/llamacpp/LlamaCppFacade.cjs +2 -2
  31. package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -2
  32. package/model-provider/llamacpp/LlamaCppFacade.js +2 -2
  33. package/model-provider/llamacpp/index.cjs +1 -1
  34. package/model-provider/llamacpp/index.d.ts +1 -1
  35. package/model-provider/llamacpp/index.js +1 -1
  36. package/model-provider/mistral/MistralChatModel.cjs +4 -4
  37. package/model-provider/mistral/MistralChatModel.d.ts +6 -6
  38. package/model-provider/mistral/MistralChatModel.js +1 -1
  39. package/model-provider/mistral/index.cjs +3 -3
  40. package/model-provider/mistral/index.d.ts +2 -2
  41. package/model-provider/mistral/index.js +2 -2
  42. package/model-provider/openai/AbstractOpenAIChatModel.cjs +2 -10
  43. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +13 -195
  44. package/model-provider/openai/AbstractOpenAIChatModel.js +2 -10
  45. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +167 -0
  46. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +199 -0
  47. package/model-provider/openai/AbstractOpenAICompletionModel.js +163 -0
  48. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -3
  49. package/model-provider/openai/OpenAIChatModel.d.ts +3 -6
  50. package/model-provider/openai/OpenAICompletionModel.cjs +4 -156
  51. package/model-provider/openai/OpenAICompletionModel.d.ts +4 -191
  52. package/model-provider/openai/OpenAICompletionModel.js +3 -155
  53. package/model-provider/openai/index.cjs +1 -0
  54. package/model-provider/openai/index.d.ts +1 -0
  55. package/model-provider/openai/index.js +1 -0
  56. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +4 -5
  57. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.cjs +74 -0
  58. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.d.ts +27 -0
  59. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.js +70 -0
  60. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +37 -6
  61. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +33 -5
  62. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +35 -5
  63. package/model-provider/openai-compatible/OpenAICompatibleProviderName.cjs +2 -0
  64. package/model-provider/openai-compatible/OpenAICompatibleProviderName.d.ts +1 -0
  65. package/model-provider/openai-compatible/OpenAICompatibleProviderName.js +1 -0
  66. package/model-provider/openai-compatible/TogetherAIApiConfiguration.cjs +29 -0
  67. package/model-provider/openai-compatible/TogetherAIApiConfiguration.d.ts +18 -0
  68. package/model-provider/openai-compatible/TogetherAIApiConfiguration.js +25 -0
  69. package/model-provider/openai-compatible/index.cjs +4 -1
  70. package/model-provider/openai-compatible/index.d.ts +4 -1
  71. package/model-provider/openai-compatible/index.js +4 -1
  72. package/package.json +16 -16
  73. package/tool/generate-tool-call/index.cjs +1 -0
  74. package/tool/generate-tool-call/index.d.ts +1 -0
  75. package/tool/generate-tool-call/index.js +1 -0
  76. package/tool/generate-tool-call/jsonToolCallPrompt.cjs +30 -0
  77. package/tool/generate-tool-call/jsonToolCallPrompt.d.ts +5 -0
  78. package/tool/generate-tool-call/jsonToolCallPrompt.js +27 -0
  79. /package/{model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts → model-function/generate-text/prompt-template/MistralInstructPromptTemplate.test.d.ts} +0 -0
  80. /package/model-provider/mistral/{MistralPromptTemplate.cjs → MistralChatPromptTemplate.cjs} +0 -0
  81. /package/model-provider/mistral/{MistralPromptTemplate.d.ts → MistralChatPromptTemplate.d.ts} +0 -0
  82. /package/model-provider/mistral/{MistralPromptTemplate.js → MistralChatPromptTemplate.js} +0 -0

package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs CHANGED
@@ -1,7 +1,34 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatTextGenerator = void 0;
+exports.ChatTextGenerator = exports.CompletionTextGenerator = void 0;
 const OpenAICompatibleChatModel_js_1 = require("./OpenAICompatibleChatModel.cjs");
+const OpenAICompatibleCompletionModel_js_1 = require("./OpenAICompatibleCompletionModel.cjs");
+/**
+ * Create a text generation model that calls an API that is compatible with OpenAI's completion API.
+ *
+ * Please note that many providers implement the API with slight differences, which can cause
+ * unexpected errors and different behavior in less common scenarios.
+ *
+ * @see https://platform.openai.com/docs/api-reference/completions/create
+ *
+ * @example
+ * ```ts
+ * const model = openaicompatible.CompletionTextGenerator({
+ *   model: "provider-specific-model-name",
+ *   temperature: 0.7,
+ *   maxGenerationTokens: 500,
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:"
+ * );
+ * ```
+ */
+function CompletionTextGenerator(settings) {
+    return new OpenAICompatibleCompletionModel_js_1.OpenAICompatibleCompletionModel(settings);
+}
+exports.CompletionTextGenerator = CompletionTextGenerator;
 /**
  * Create a text generation model that calls an API that is compatible with OpenAI's chat API.
  *
@@ -11,18 +38,22 @@ const OpenAICompatibleChatModel_js_1 = require("./OpenAICompatibleChatModel.cjs");
  * @see https://platform.openai.com/docs/api-reference/chat/create
  *
  * @example
+ * ```ts
  * const model = openaicompatible.ChatTextGenerator({
  *   model: "provider-specific-model-name",
  *   temperature: 0.7,
  *   maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText([
+ * const text = await generateText(
  *   model,
- *   openai.ChatMessage.system(
- *     "Write a short story about a robot learning to love:"
- *   ),
- * ]);
+ *   [
+ *     openai.ChatMessage.user(
+ *       "Write a short story about a robot learning to love:"
+ *     ),
+ *   ]
+ * );
+ * ```
  */
 function ChatTextGenerator(settings) {
     return new OpenAICompatibleChatModel_js_1.OpenAICompatibleChatModel(settings);

package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts CHANGED
@@ -1,4 +1,28 @@
 import { OpenAICompatibleChatModel, OpenAICompatibleChatSettings } from "./OpenAICompatibleChatModel.js";
+import { OpenAICompatibleCompletionModel } from "./OpenAICompatibleCompletionModel.js";
+/**
+ * Create a text generation model that calls an API that is compatible with OpenAI's completion API.
+ *
+ * Please note that many providers implement the API with slight differences, which can cause
+ * unexpected errors and different behavior in less common scenarios.
+ *
+ * @see https://platform.openai.com/docs/api-reference/completions/create
+ *
+ * @example
+ * ```ts
+ * const model = openaicompatible.CompletionTextGenerator({
+ *   model: "provider-specific-model-name",
+ *   temperature: 0.7,
+ *   maxGenerationTokens: 500,
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:"
+ * );
+ * ```
+ */
+export declare function CompletionTextGenerator(settings: OpenAICompatibleChatSettings): OpenAICompatibleCompletionModel;
 /**
  * Create a text generation model that calls an API that is compatible with OpenAI's chat API.
  *
@@ -8,17 +32,21 @@ import { OpenAICompatibleChatModel, OpenAICompatibleChatSettings } from "./OpenAICompatibleChatModel.js";
  * @see https://platform.openai.com/docs/api-reference/chat/create
  *
  * @example
+ * ```ts
  * const model = openaicompatible.ChatTextGenerator({
  *   model: "provider-specific-model-name",
  *   temperature: 0.7,
  *   maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText([
+ * const text = await generateText(
  *   model,
- *   openai.ChatMessage.system(
- *     "Write a short story about a robot learning to love:"
- *   ),
- * ]);
+ *   [
+ *     openai.ChatMessage.user(
+ *       "Write a short story about a robot learning to love:"
+ *     ),
+ *   ]
+ * );
+ * ```
  */
 export declare function ChatTextGenerator(settings: OpenAICompatibleChatSettings): OpenAICompatibleChatModel;

package/model-provider/openai-compatible/OpenAICompatibleFacade.js CHANGED
@@ -1,4 +1,30 @@
 import { OpenAICompatibleChatModel, } from "./OpenAICompatibleChatModel.js";
+import { OpenAICompatibleCompletionModel } from "./OpenAICompatibleCompletionModel.js";
+/**
+ * Create a text generation model that calls an API that is compatible with OpenAI's completion API.
+ *
+ * Please note that many providers implement the API with slight differences, which can cause
+ * unexpected errors and different behavior in less common scenarios.
+ *
+ * @see https://platform.openai.com/docs/api-reference/completions/create
+ *
+ * @example
+ * ```ts
+ * const model = openaicompatible.CompletionTextGenerator({
+ *   model: "provider-specific-model-name",
+ *   temperature: 0.7,
+ *   maxGenerationTokens: 500,
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:"
+ * );
+ * ```
+ */
+export function CompletionTextGenerator(settings) {
+    return new OpenAICompatibleCompletionModel(settings);
+}
 /**
  * Create a text generation model that calls an API that is compatible with OpenAI's chat API.
  *
@@ -8,18 +34,22 @@ import { OpenAICompatibleChatModel, } from "./OpenAICompatibleChatModel.js";
  * @see https://platform.openai.com/docs/api-reference/chat/create
  *
  * @example
+ * ```ts
  * const model = openaicompatible.ChatTextGenerator({
  *   model: "provider-specific-model-name",
  *   temperature: 0.7,
  *   maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText([
+ * const text = await generateText(
  *   model,
- *   openai.ChatMessage.system(
- *     "Write a short story about a robot learning to love:"
- *   ),
- * ]);
+ *   [
+ *     openai.ChatMessage.user(
+ *       "Write a short story about a robot learning to love:"
+ *     ),
+ *   ]
+ * );
+ * ```
  */
 export function ChatTextGenerator(settings) {
     return new OpenAICompatibleChatModel(settings);

package/model-provider/openai-compatible/OpenAICompatibleProviderName.cjs ADDED
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });

package/model-provider/openai-compatible/OpenAICompatibleProviderName.d.ts ADDED
@@ -0,0 +1 @@
+export type OpenAICompatibleProviderName = `openaicompatible` | `openaicompatible-${string}`;

package/model-provider/openai-compatible/TogetherAIApiConfiguration.cjs ADDED
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TogetherAIApiConfiguration = void 0;
+const BaseUrlApiConfiguration_js_1 = require("../../core/api/BaseUrlApiConfiguration.cjs");
+const loadApiKey_js_1 = require("../../core/api/loadApiKey.cjs");
+/**
+ * Configuration for the Together.ai API.
+ *
+ * It uses the `TOGETHER_API_KEY` api key environment variable.
+ *
+ * @see https://docs.together.ai/docs/openai-api-compatibility
+ */
+class TogetherAIApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfiguration {
+    constructor({ baseUrl = "https://api.together.xyz/v1", apiKey, retry, throttle, } = {}) {
+        super({
+            baseUrl,
+            headers: {
+                Authorization: `Bearer ${(0, loadApiKey_js_1.loadApiKey)({
+                    apiKey,
+                    environmentVariableName: "TOGETHER_API_KEY",
+                    description: "Together AI",
+                })}`,
+            },
+            retry,
+            throttle,
+        });
+    }
+}
+exports.TogetherAIApiConfiguration = TogetherAIApiConfiguration;

package/model-provider/openai-compatible/TogetherAIApiConfiguration.d.ts ADDED
@@ -0,0 +1,18 @@
+import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
+import { RetryFunction } from "../../core/api/RetryFunction.js";
+import { ThrottleFunction } from "../../core/api/ThrottleFunction.js";
+/**
+ * Configuration for the Together.ai API.
+ *
+ * It uses the `TOGETHER_API_KEY` api key environment variable.
+ *
+ * @see https://docs.together.ai/docs/openai-api-compatibility
+ */
+export declare class TogetherAIApiConfiguration extends BaseUrlApiConfiguration {
+    constructor({ baseUrl, apiKey, retry, throttle, }?: {
+        baseUrl?: string;
+        apiKey?: string;
+        retry?: RetryFunction;
+        throttle?: ThrottleFunction;
+    });
+}

package/model-provider/openai-compatible/TogetherAIApiConfiguration.js ADDED
@@ -0,0 +1,25 @@
+import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
+import { loadApiKey } from "../../core/api/loadApiKey.js";
+/**
+ * Configuration for the Together.ai API.
+ *
+ * It uses the `TOGETHER_API_KEY` api key environment variable.
+ *
+ * @see https://docs.together.ai/docs/openai-api-compatibility
+ */
+export class TogetherAIApiConfiguration extends BaseUrlApiConfiguration {
+    constructor({ baseUrl = "https://api.together.xyz/v1", apiKey, retry, throttle, } = {}) {
+        super({
+            baseUrl,
+            headers: {
+                Authorization: `Bearer ${loadApiKey({
+                    apiKey,
+                    environmentVariableName: "TOGETHER_API_KEY",
+                    description: "Together AI",
+                })}`,
+            },
+            retry,
+            throttle,
+        });
+    }
+}
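
Taken together, the new TogetherAIApiConfiguration and the openaicompatible.CompletionTextGenerator facade let Together AI models be called through the OpenAI-compatible completion API. A minimal sketch of how the two pieces might be combined, assuming the usual root-level "modelfusion" re-exports and a hypothetical Together AI model id (not taken from this diff):

    import {
      generateText,
      openaicompatible,
      TogetherAIApiConfiguration,
    } from "modelfusion";

    const text = await generateText(
      openaicompatible.CompletionTextGenerator({
        // Reads the TOGETHER_API_KEY environment variable when no apiKey is passed.
        api: new TogetherAIApiConfiguration(),
        model: "mistralai/Mixtral-8x7B-v0.1", // hypothetical model id
        temperature: 0.7,
        maxGenerationTokens: 500,
      }),
      "Write a short story about a robot learning to love:"
    );

The `api` setting follows the pattern used by the other modelfusion API configurations; custom `retry` and `throttle` functions can be passed to the constructor as shown in the diff above.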

package/model-provider/openai-compatible/index.cjs CHANGED
@@ -27,6 +27,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.openaicompatible = void 0;
+__exportStar(require("./FireworksAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAICompatibleChatModel.cjs"), exports);
+__exportStar(require("./OpenAICompatibleCompletionModel.cjs"), exports);
 exports.openaicompatible = __importStar(require("./OpenAICompatibleFacade.cjs"));
-__exportStar(require("./FireworksAIApiConfiguration.cjs"), exports);
+__exportStar(require("./OpenAICompatibleProviderName.cjs"), exports);
+__exportStar(require("./TogetherAIApiConfiguration.cjs"), exports);

package/model-provider/openai-compatible/index.d.ts CHANGED
@@ -1,3 +1,6 @@
+export * from "./FireworksAIApiConfiguration.js";
 export * from "./OpenAICompatibleChatModel.js";
+export * from "./OpenAICompatibleCompletionModel.js";
 export * as openaicompatible from "./OpenAICompatibleFacade.js";
-export * from "./FireworksAIApiConfiguration.js";
+export * from "./OpenAICompatibleProviderName.js";
+export * from "./TogetherAIApiConfiguration.js";

package/model-provider/openai-compatible/index.js CHANGED
@@ -1,3 +1,6 @@
+export * from "./FireworksAIApiConfiguration.js";
 export * from "./OpenAICompatibleChatModel.js";
+export * from "./OpenAICompatibleCompletionModel.js";
 export * as openaicompatible from "./OpenAICompatibleFacade.js";
-export * from "./FireworksAIApiConfiguration.js";
+export * from "./OpenAICompatibleProviderName.js";
+export * from "./TogetherAIApiConfiguration.js";

package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "modelfusion",
   "description": "The TypeScript library for building multi-modal AI applications.",
-  "version": "0.106.0",
+  "version": "0.108.0",
   "author": "Lars Grammel",
   "license": "MIT",
   "keywords": [
@@ -48,20 +48,6 @@
       "require": "./extension/index.cjs"
     }
   },
-  "scripts": {
-    "lint": "eslint --ext .ts src",
-    "clean": "rimraf build dist .turbo node_modules",
-    "clean:build": "rimraf build dist",
-    "build": "pnpm build:esm && pnpm build:cjs && pnpm build:copy-files",
-    "build:esm": "tsc --outDir dist/",
-    "build:cjs": "tsc --outDir build/cjs/ -p tsconfig.cjs.json && node bin/prepare-cjs.js",
-    "build:copy-files": "copyfiles --flat package.json ../../README.md ../../LICENSE ../../CHANGELOG.md dist",
-    "test": "vitest --config vitest.config.js --run src",
-    "test:watch": "vitest watch --config vitest.config.js",
-    "test:coverage": "vitest run --config vitest.config.js --coverage",
-    "test:coverage:ui": "vitest --config vitest.config.js --coverage --ui",
-    "dist": "pnpm clean:build && pnpm lint && pnpm test && pnpm build"
-  },
   "dependencies": {
     "eventsource-parser": "1.1.1",
     "js-tiktoken": "1.0.7",
@@ -81,5 +67,19 @@
     "eslint": "^8.45.0",
     "eslint-config-prettier": "9.1.0",
     "msw": "2.0.11"
+  },
+  "scripts": {
+    "lint": "eslint --ext .ts src",
+    "clean": "rimraf build dist .turbo node_modules",
+    "clean:build": "rimraf build dist",
+    "build": "pnpm build:esm && pnpm build:cjs && pnpm build:copy-files",
+    "build:esm": "tsc --outDir dist/",
+    "build:cjs": "tsc --outDir build/cjs/ -p tsconfig.cjs.json && node bin/prepare-cjs.js",
+    "build:copy-files": "copyfiles --flat package.json ../../README.md ../../LICENSE ../../CHANGELOG.md dist",
+    "test": "vitest --config vitest.config.js --run src",
+    "test:watch": "vitest watch --config vitest.config.js",
+    "test:coverage": "vitest run --config vitest.config.js --coverage",
+    "test:coverage:ui": "vitest --config vitest.config.js --coverage --ui",
+    "dist": "pnpm clean:build && pnpm lint && pnpm test && pnpm build"
   }
-}
+}

package/tool/generate-tool-call/index.cjs CHANGED
@@ -19,3 +19,4 @@ __exportStar(require("./ToolCallGenerationEvent.cjs"), exports);
 __exportStar(require("./ToolCallGenerationModel.cjs"), exports);
 __exportStar(require("./ToolCallParseError.cjs"), exports);
 __exportStar(require("./generateToolCall.cjs"), exports);
+__exportStar(require("./jsonToolCallPrompt.cjs"), exports);

package/tool/generate-tool-call/index.d.ts CHANGED
@@ -3,3 +3,4 @@ export * from "./ToolCallGenerationEvent.js";
 export * from "./ToolCallGenerationModel.js";
 export * from "./ToolCallParseError.js";
 export * from "./generateToolCall.js";
+export * from "./jsonToolCallPrompt.js";

package/tool/generate-tool-call/index.js CHANGED
@@ -3,3 +3,4 @@ export * from "./ToolCallGenerationEvent.js";
 export * from "./ToolCallGenerationModel.js";
 export * from "./ToolCallParseError.js";
 export * from "./generateToolCall.js";
+export * from "./jsonToolCallPrompt.js";

package/tool/generate-tool-call/jsonToolCallPrompt.cjs ADDED
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.jsonToolCallPrompt = void 0;
+const nanoid_1 = require("nanoid");
+const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
+exports.jsonToolCallPrompt = {
+    text() {
+        return {
+            createPrompt(instruction, tool) {
+                return {
+                    system: [
+                        `You are calling a function "${tool.name}".`,
+                        tool.description != null
+                            ? ` Function description: ${tool.description}`
+                            : null,
+                        ` Function parameters JSON schema: ${JSON.stringify(tool.parameters.getJsonSchema())}`,
+                        ``,
+                        `You MUST answer with a JSON object matches the above schema for the arguments.`,
+                    ]
+                        .filter(Boolean)
+                        .join("\n"),
+                    instruction,
+                };
+            },
+            extractToolCall(response) {
+                return { id: (0, nanoid_1.nanoid)(), args: (0, parseJSON_js_1.parseJSON)({ text: response }) };
+            },
+        };
+    },
+};

package/tool/generate-tool-call/jsonToolCallPrompt.d.ts ADDED
@@ -0,0 +1,5 @@
+import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+import { ToolCallPromptTemplate } from "./TextGenerationToolCallModel.js";
+export declare const jsonToolCallPrompt: {
+    text(): ToolCallPromptTemplate<string, InstructionPrompt>;
+};

package/tool/generate-tool-call/jsonToolCallPrompt.js ADDED
@@ -0,0 +1,27 @@
+import { nanoid } from "nanoid";
+import { parseJSON } from "../../core/schema/parseJSON.js";
+export const jsonToolCallPrompt = {
+    text() {
+        return {
+            createPrompt(instruction, tool) {
+                return {
+                    system: [
+                        `You are calling a function "${tool.name}".`,
+                        tool.description != null
+                            ? ` Function description: ${tool.description}`
+                            : null,
+                        ` Function parameters JSON schema: ${JSON.stringify(tool.parameters.getJsonSchema())}`,
+                        ``,
+                        `You MUST answer with a JSON object matches the above schema for the arguments.`,
+                    ]
+                        .filter(Boolean)
+                        .join("\n"),
+                    instruction,
+                };
+            },
+            extractToolCall(response) {
+                return { id: nanoid(), args: parseJSON({ text: response }) };
+            },
+        };
+    },
+};
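
The new jsonToolCallPrompt.text() template maps a tool definition to a plain instruction prompt and parses the model's JSON answer back into a tool call. A minimal sketch of the template in isolation, assuming jsonToolCallPrompt is re-exported from the package root; the `calculator` stand-in below is hypothetical and only carries the fields the template reads (in practice a real Tool with a schema is passed in, and strict type checking expects a full tool definition):

    import { jsonToolCallPrompt } from "modelfusion";

    // Hypothetical stand-in for a tool definition with just the fields that
    // jsonToolCallPrompt reads: name, description, parameters.getJsonSchema().
    const calculator = {
      name: "calculator",
      description: "Evaluates a math expression.",
      parameters: {
        getJsonSchema: () => ({
          type: "object",
          properties: { expression: { type: "string" } },
          required: ["expression"],
        }),
      },
    };

    const template = jsonToolCallPrompt.text();

    // Builds the { system, instruction } prompt that is sent to the text model.
    const prompt = template.createPrompt("What is 2 + 2?", calculator);

    // Parses the raw model response into { id, args }.
    const toolCall = template.extractToolCall('{ "expression": "2 + 2" }');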