modelfusion 0.79.0 → 0.81.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/README.md +58 -46
  2. package/core/schema/UncheckedSchema.d.ts +2 -1
  3. package/core/schema/ZodSchema.d.ts +2 -1
  4. package/core/schema/index.cjs +0 -3
  5. package/core/schema/index.d.ts +0 -3
  6. package/core/schema/index.js +0 -3
  7. package/guard/fixStructure.cjs +14 -8
  8. package/guard/fixStructure.d.ts +14 -8
  9. package/guard/fixStructure.js +14 -8
  10. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +2 -3
  11. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +8 -10
  12. package/model-function/generate-structure/StructureFromTextGenerationModel.js +2 -3
  13. package/model-function/generate-structure/StructureFromTextPromptFormat.d.ts +6 -0
  14. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +69 -0
  15. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +22 -0
  16. package/model-function/generate-structure/StructureFromTextStreamingModel.js +65 -0
  17. package/model-function/generate-structure/StructureGenerationModel.d.ts +4 -3
  18. package/model-function/generate-structure/StructureParseError.cjs +2 -10
  19. package/model-function/generate-structure/StructureParseError.d.ts +1 -4
  20. package/model-function/generate-structure/StructureParseError.js +2 -10
  21. package/model-function/generate-structure/StructureValidationError.cjs +2 -10
  22. package/model-function/generate-structure/StructureValidationError.d.ts +1 -4
  23. package/model-function/generate-structure/StructureValidationError.js +2 -10
  24. package/model-function/generate-structure/generateStructure.cjs +4 -5
  25. package/model-function/generate-structure/generateStructure.d.ts +14 -20
  26. package/model-function/generate-structure/generateStructure.js +4 -5
  27. package/model-function/generate-structure/index.cjs +3 -0
  28. package/model-function/generate-structure/index.d.ts +3 -0
  29. package/model-function/generate-structure/index.js +3 -0
  30. package/model-function/generate-structure/jsonStructurePrompt.cjs +11 -0
  31. package/model-function/generate-structure/jsonStructurePrompt.d.ts +4 -0
  32. package/model-function/generate-structure/jsonStructurePrompt.js +7 -0
  33. package/model-function/generate-structure/streamStructure.cjs +4 -4
  34. package/model-function/generate-structure/streamStructure.d.ts +18 -26
  35. package/model-function/generate-structure/streamStructure.js +4 -4
  36. package/model-function/generate-text/PromptFormatTextGenerationModel.cjs +7 -0
  37. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +3 -0
  38. package/model-function/generate-text/PromptFormatTextGenerationModel.js +7 -0
  39. package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +7 -0
  40. package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +3 -0
  41. package/model-function/generate-text/PromptFormatTextStreamingModel.js +7 -0
  42. package/model-function/generate-text/prompt-format/AlpacaPromptFormat.cjs +10 -10
  43. package/model-function/generate-text/prompt-format/AlpacaPromptFormat.d.ts +2 -2
  44. package/model-function/generate-text/prompt-format/AlpacaPromptFormat.js +10 -10
  45. package/model-function/generate-text/prompt-format/ChatMLPromptFormat.cjs +15 -13
  46. package/model-function/generate-text/prompt-format/ChatMLPromptFormat.d.ts +3 -3
  47. package/model-function/generate-text/prompt-format/ChatMLPromptFormat.js +15 -13
  48. package/model-function/generate-text/prompt-format/ChatPrompt.cjs +24 -0
  49. package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +10 -4
  50. package/model-function/generate-text/prompt-format/ChatPrompt.js +22 -1
  51. package/model-function/generate-text/prompt-format/Content.cjs +2 -0
  52. package/model-function/generate-text/prompt-format/Content.d.ts +20 -0
  53. package/model-function/generate-text/prompt-format/Content.js +1 -0
  54. package/model-function/generate-text/prompt-format/InstructionPrompt.d.ts +22 -17
  55. package/model-function/generate-text/prompt-format/InvalidPromptError.cjs +28 -0
  56. package/model-function/generate-text/prompt-format/InvalidPromptError.d.ts +13 -0
  57. package/model-function/generate-text/prompt-format/InvalidPromptError.js +24 -0
  58. package/model-function/generate-text/prompt-format/Llama2PromptFormat.cjs +14 -10
  59. package/model-function/generate-text/prompt-format/Llama2PromptFormat.d.ts +3 -3
  60. package/model-function/generate-text/prompt-format/Llama2PromptFormat.js +14 -10
  61. package/model-function/generate-text/prompt-format/TextPromptFormat.cjs +11 -11
  62. package/model-function/generate-text/prompt-format/TextPromptFormat.d.ts +3 -3
  63. package/model-function/generate-text/prompt-format/TextPromptFormat.js +11 -11
  64. package/model-function/generate-text/prompt-format/VicunaPromptFormat.cjs +6 -6
  65. package/model-function/generate-text/prompt-format/VicunaPromptFormat.d.ts +1 -1
  66. package/model-function/generate-text/prompt-format/VicunaPromptFormat.js +6 -6
  67. package/model-function/generate-text/prompt-format/index.cjs +2 -2
  68. package/model-function/generate-text/prompt-format/index.d.ts +2 -2
  69. package/model-function/generate-text/prompt-format/index.js +2 -2
  70. package/model-function/generate-text/prompt-format/trimChatPrompt.cjs +2 -2
  71. package/model-function/generate-text/prompt-format/trimChatPrompt.d.ts +1 -1
  72. package/model-function/generate-text/prompt-format/trimChatPrompt.js +1 -1
  73. package/model-provider/anthropic/AnthropicPromptFormat.cjs +10 -10
  74. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +3 -3
  75. package/model-provider/anthropic/AnthropicPromptFormat.js +10 -10
  76. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +1 -1
  77. package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -1
  78. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.cjs +20 -11
  79. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.js +20 -11
  80. package/model-provider/openai/OpenAICompletionModel.d.ts +1 -1
  81. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +147 -0
  82. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +89 -0
  83. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +140 -0
  84. package/model-provider/openai/chat/OpenAIChatMessage.cjs +19 -14
  85. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +2 -5
  86. package/model-provider/openai/chat/OpenAIChatMessage.js +19 -14
  87. package/model-provider/openai/chat/OpenAIChatModel.cjs +16 -56
  88. package/model-provider/openai/chat/OpenAIChatModel.d.ts +10 -54
  89. package/model-provider/openai/chat/OpenAIChatModel.js +17 -54
  90. package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +19 -14
  91. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +6 -2
  92. package/model-provider/openai/chat/OpenAIChatPromptFormat.js +17 -13
  93. package/package.json +1 -1
  94. package/tool/generate-tool-calls-or-text/generateToolCallsOrText.d.ts +2 -2
  95. package/core/schema/StructureDefinition.d.ts +0 -7
  96. package/core/schema/UncheckedStructureDefinition.cjs +0 -30
  97. package/core/schema/UncheckedStructureDefinition.d.ts +0 -12
  98. package/core/schema/UncheckedStructureDefinition.js +0 -26
  99. package/core/schema/ZodStructureDefinition.cjs +0 -30
  100. package/core/schema/ZodStructureDefinition.d.ts +0 -13
  101. package/core/schema/ZodStructureDefinition.js +0 -26
  102. package/model-function/generate-text/prompt-format/ChatPromptValidationError.cjs +0 -17
  103. package/model-function/generate-text/prompt-format/ChatPromptValidationError.d.ts +0 -8
  104. package/model-function/generate-text/prompt-format/ChatPromptValidationError.js +0 -13
  105. package/model-function/generate-text/prompt-format/validateChatPrompt.cjs +0 -24
  106. package/model-function/generate-text/prompt-format/validateChatPrompt.d.ts +0 -5
  107. package/model-function/generate-text/prompt-format/validateChatPrompt.js +0 -20
  108. /package/{core/schema/StructureDefinition.cjs → model-function/generate-structure/StructureFromTextPromptFormat.cjs} +0 -0
  109. /package/{core/schema/StructureDefinition.js → model-function/generate-structure/StructureFromTextPromptFormat.js} +0 -0
@@ -1,7 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.chat = void 0;
- const validateChatPrompt_js_1 = require("./validateChatPrompt.cjs");
+ const ChatPrompt_js_1 = require("./ChatPrompt.cjs");
  // default Vicuna 1 system message
  const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
  "The assistant gives helpful, detailed, and polite answers to the user's questions.";
@@ -20,12 +20,12 @@ const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial
  */
  function chat() {
  return {
- format: (chatPrompt) => {
- (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
- let text = chatPrompt.system != null
- ? `${chatPrompt.system}\n\n`
+ format(prompt) {
+ (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
+ let text = prompt.system != null
+ ? `${prompt.system}\n\n`
  : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
- for (const { role, content } of chatPrompt.messages) {
+ for (const { role, content } of prompt.messages) {
  switch (role) {
  case "user": {
  text += `USER: ${content}\n`;
@@ -1,5 +1,5 @@
- import { ChatPrompt } from "./ChatPrompt.js";
  import { TextGenerationPromptFormat } from "../TextGenerationPromptFormat.js";
+ import { ChatPrompt } from "./ChatPrompt.js";
  /**
  * Formats a chat prompt as a Vicuna prompt.
  *
@@ -1,4 +1,4 @@
- import { validateChatPrompt } from "./validateChatPrompt.js";
+ import { validateChatPrompt } from "./ChatPrompt.js";
  // default Vicuna 1 system message
  const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
  "The assistant gives helpful, detailed, and polite answers to the user's questions.";
@@ -17,12 +17,12 @@ const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial
  */
  export function chat() {
  return {
- format: (chatPrompt) => {
- validateChatPrompt(chatPrompt);
- let text = chatPrompt.system != null
- ? `${chatPrompt.system}\n\n`
+ format(prompt) {
+ validateChatPrompt(prompt);
+ let text = prompt.system != null
+ ? `${prompt.system}\n\n`
  : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
- for (const { role, content } of chatPrompt.messages) {
+ for (const { role, content } of prompt.messages) {
  switch (role) {
  case "user": {
  text += `USER: ${content}\n`;
@@ -30,10 +30,10 @@ exports.VicunaPromptFormat = exports.TextPromptFormat = exports.Llama2PromptForm
  exports.AlpacaPromptFormat = __importStar(require("./AlpacaPromptFormat.cjs"));
  exports.ChatMLPromptFormat = __importStar(require("./ChatMLPromptFormat.cjs"));
  __exportStar(require("./ChatPrompt.cjs"), exports);
- __exportStar(require("./ChatPromptValidationError.cjs"), exports);
+ __exportStar(require("./Content.cjs"), exports);
  __exportStar(require("./InstructionPrompt.cjs"), exports);
  exports.Llama2PromptFormat = __importStar(require("./Llama2PromptFormat.cjs"));
+ __exportStar(require("./InvalidPromptError.cjs"), exports);
  exports.TextPromptFormat = __importStar(require("./TextPromptFormat.cjs"));
  exports.VicunaPromptFormat = __importStar(require("./VicunaPromptFormat.cjs"));
  __exportStar(require("./trimChatPrompt.cjs"), exports);
- __exportStar(require("./validateChatPrompt.cjs"), exports);
@@ -1,10 +1,10 @@
  export * as AlpacaPromptFormat from "./AlpacaPromptFormat.js";
  export * as ChatMLPromptFormat from "./ChatMLPromptFormat.js";
  export * from "./ChatPrompt.js";
- export * from "./ChatPromptValidationError.js";
+ export * from "./Content.js";
  export * from "./InstructionPrompt.js";
  export * as Llama2PromptFormat from "./Llama2PromptFormat.js";
+ export * from "./InvalidPromptError.js";
  export * as TextPromptFormat from "./TextPromptFormat.js";
  export * as VicunaPromptFormat from "./VicunaPromptFormat.js";
  export * from "./trimChatPrompt.js";
- export * from "./validateChatPrompt.js";
@@ -1,10 +1,10 @@
  export * as AlpacaPromptFormat from "./AlpacaPromptFormat.js";
  export * as ChatMLPromptFormat from "./ChatMLPromptFormat.js";
  export * from "./ChatPrompt.js";
- export * from "./ChatPromptValidationError.js";
+ export * from "./Content.js";
  export * from "./InstructionPrompt.js";
  export * as Llama2PromptFormat from "./Llama2PromptFormat.js";
+ export * from "./InvalidPromptError.js";
  export * as TextPromptFormat from "./TextPromptFormat.js";
  export * as VicunaPromptFormat from "./VicunaPromptFormat.js";
  export * from "./trimChatPrompt.js";
- export * from "./validateChatPrompt.js";
@@ -1,7 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.trimChatPrompt = void 0;
- const validateChatPrompt_js_1 = require("./validateChatPrompt.cjs");
+ const ChatPrompt_js_1 = require("./ChatPrompt.cjs");
  /**
  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
  *
@@ -14,7 +14,7 @@ const validateChatPrompt_js_1 = require("./validateChatPrompt.cjs");
  */
  async function trimChatPrompt({ prompt, model, tokenLimit = model.contextWindowSize -
  (model.settings.maxCompletionTokens ?? model.contextWindowSize / 4), }) {
- (0, validateChatPrompt_js_1.validateChatPrompt)(prompt);
+ (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
  let minimalPrompt = {
  system: prompt.system,
  messages: [prompt.messages[prompt.messages.length - 1]], // last user message
@@ -1,5 +1,5 @@
- import { ChatPrompt } from "./ChatPrompt.js";
  import { HasContextWindowSize, HasTokenizer, TextGenerationModel, TextGenerationModelSettings } from "../TextGenerationModel.js";
+ import { ChatPrompt } from "./ChatPrompt.js";
  /**
  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
  *
@@ -1,4 +1,4 @@
- import { validateChatPrompt } from "./validateChatPrompt.js";
+ import { validateChatPrompt } from "./ChatPrompt.js";
  /**
  * Keeps only the most recent messages in the prompt, while leaving enough space for the completion.
  *
@@ -1,16 +1,16 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.chat = exports.instruction = exports.text = void 0;
- const validateChatPrompt_js_1 = require("../../model-function/generate-text/prompt-format/validateChatPrompt.cjs");
+ const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-format/ChatPrompt.cjs");
  /**
  * Formats a text prompt as an Anthropic prompt.
  */
  function text() {
  return {
- format: (instruction) => {
+ format(prompt) {
  let text = "";
  text += "\n\nHuman:";
- text += instruction;
+ text += prompt;
  text += "\n\nAssistant:";
  return text;
  },
@@ -23,10 +23,10 @@ exports.text = text;
  */
  function instruction() {
  return {
- format: (instruction) => {
- let text = instruction.system ?? "";
+ format(prompt) {
+ let text = prompt.system ?? "";
  text += "\n\nHuman:";
- text += instruction.instruction;
+ text += prompt.instruction;
  text += "\n\nAssistant:";
  return text;
  },
@@ -41,10 +41,10 @@ exports.instruction = instruction;
  */
  function chat() {
  return {
- format: (chatPrompt) => {
- (0, validateChatPrompt_js_1.validateChatPrompt)(chatPrompt);
- let text = chatPrompt.system ?? "";
- for (const { role, content } of chatPrompt.messages) {
+ format(prompt) {
+ (0, ChatPrompt_js_1.validateChatPrompt)(prompt);
+ let text = prompt.system ?? "";
+ for (const { role, content } of prompt.messages) {
  switch (role) {
  case "user": {
  text += `\n\nHuman:${content}`;
@@ -1,6 +1,6 @@
- import { ChatPrompt } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
- import { InstructionPrompt } from "../../model-function/generate-text/prompt-format/InstructionPrompt.js";
  import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { ChatPrompt } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
+ import { TextInstructionPrompt } from "../../model-function/generate-text/prompt-format/InstructionPrompt.js";
  /**
  * Formats a text prompt as an Anthropic prompt.
  */
@@ -8,7 +8,7 @@ export declare function text(): TextGenerationPromptFormat<string, string>;
  /**
  * Formats an instruction prompt as an Anthropic prompt.
  */
- export declare function instruction(): TextGenerationPromptFormat<InstructionPrompt, string>;
+ export declare function instruction(): TextGenerationPromptFormat<TextInstructionPrompt, string>;
  /**
  * Formats a chat prompt as an Anthropic prompt.
  *
@@ -1,13 +1,13 @@
- import { validateChatPrompt } from "../../model-function/generate-text/prompt-format/validateChatPrompt.js";
+ import { validateChatPrompt, } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
  /**
  * Formats a text prompt as an Anthropic prompt.
  */
  export function text() {
  return {
- format: (instruction) => {
+ format(prompt) {
  let text = "";
  text += "\n\nHuman:";
- text += instruction;
+ text += prompt;
  text += "\n\nAssistant:";
  return text;
  },
@@ -19,10 +19,10 @@ export function text() {
  */
  export function instruction() {
  return {
- format: (instruction) => {
- let text = instruction.system ?? "";
+ format(prompt) {
+ let text = prompt.system ?? "";
  text += "\n\nHuman:";
- text += instruction.instruction;
+ text += prompt.instruction;
  text += "\n\nAssistant:";
  return text;
  },
@@ -36,10 +36,10 @@ export function instruction() {
  */
  export function chat() {
  return {
- format: (chatPrompt) => {
- validateChatPrompt(chatPrompt);
- let text = chatPrompt.system ?? "";
- for (const { role, content } of chatPrompt.messages) {
+ format(prompt) {
+ validateChatPrompt(prompt);
+ let text = prompt.system ?? "";
+ for (const { role, content } of prompt.messages) {
  switch (role) {
  case "user": {
  text += `\n\nHuman:${content}`;
@@ -65,7 +65,7 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
  /**
  * Returns this model with an instruction prompt format.
  */
- withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").InstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
+ withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
  /**
  * Returns this model with a chat prompt format.
  */
@@ -87,7 +87,7 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
  /**
  * Returns this model with an instruction prompt format.
  */
- withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").InstructionPrompt, string, CohereTextGenerationModelSettings, this>;
+ withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, CohereTextGenerationModelSettings, this>;
  /**
  * Returns this model with a chat prompt format.
  */
@@ -11,21 +11,30 @@ const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial
  */
  function instruction() {
  return {
- format: (instruction) => {
+ format(prompt) {
  let text = "";
- text += `${instruction.system ?? DEFAULT_SYSTEM_MESSAGE}\n\n`;
+ text += `${prompt.system ?? DEFAULT_SYSTEM_MESSAGE}\n\n`;
  text += `USER: `;
- if (instruction.image != null) {
- text += `[img-1]\n`;
+ // construct text and image mapping:
+ let imageCounter = 1;
+ const images = {};
+ for (const content of prompt.instruction) {
+ switch (content.type) {
+ case "text": {
+ text += content.text;
+ break;
+ }
+ case "image": {
+ text += `[img-${imageCounter}]`;
+ images[imageCounter.toString()] = content.base64Image;
+ imageCounter++;
+ break;
+ }
+ }
+ text += `${content}\n`;
  }
- text += `${instruction.instruction}\n`;
  text += `ASSISTANT: `;
- return {
- text,
- images: instruction.image != null
- ? { "1": instruction.image.base64Content }
- : undefined,
- };
+ return { text, images };
  },
  stopSequences: [`\nUSER:`],
  };
@@ -8,21 +8,30 @@ const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial
  */
  export function instruction() {
  return {
- format: (instruction) => {
+ format(prompt) {
  let text = "";
- text += `${instruction.system ?? DEFAULT_SYSTEM_MESSAGE}\n\n`;
+ text += `${prompt.system ?? DEFAULT_SYSTEM_MESSAGE}\n\n`;
  text += `USER: `;
- if (instruction.image != null) {
- text += `[img-1]\n`;
+ // construct text and image mapping:
+ let imageCounter = 1;
+ const images = {};
+ for (const content of prompt.instruction) {
+ switch (content.type) {
+ case "text": {
+ text += content.text;
+ break;
+ }
+ case "image": {
+ text += `[img-${imageCounter}]`;
+ images[imageCounter.toString()] = content.base64Image;
+ imageCounter++;
+ break;
+ }
+ }
+ text += `${content}\n`;
  }
- text += `${instruction.instruction}\n`;
  text += `ASSISTANT: `;
- return {
- text,
- images: instruction.image != null
- ? { "1": instruction.image.base64Content }
- : undefined,
- };
+ return { text, images };
  },
  stopSequences: [`\nUSER:`],
  };
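The reworked BakLLaVA 1 instruction format above no longer reads a single `instruction` string with an optional `image`; it iterates over a multi-part instruction (`prompt.system`, plus `prompt.instruction` entries with `type`, `text`, and `base64Image` fields) and builds an `[img-N]` to base64 mapping. A minimal sketch of a prompt object matching the fields the format reads (the variable name and the placeholder data are illustrative, not part of this diff):

// Hypothetical prompt shape for the new multi-modal instruction format.
// Only fields accessed above (system, instruction[].type/.text/.base64Image) are used.
const multiModalPrompt = {
  system: "You are a helpful vision assistant.",
  instruction: [
    { type: "text", text: "Describe the following image:" },
    { type: "image", base64Image: "<base64-encoded image data>" }, // placeholder
  ],
};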
@@ -176,7 +176,7 @@ export declare class OpenAICompletionModel extends AbstractModel<OpenAICompletio
  /**
  * Returns this model with an instruction prompt format.
  */
- withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").InstructionPrompt, string, OpenAICompletionModelSettings, this>;
+ withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, OpenAICompletionModelSettings, this>;
  /**
  * Returns this model with a chat prompt format.
  */
@@ -0,0 +1,147 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OpenAIChatFunctionCallStructureGenerationModel = void 0;
+ const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
+ const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
+ const OpenAIChatModel_1 = require("./OpenAIChatModel");
+ const OpenAIChatPromptFormat_js_1 = require("./OpenAIChatPromptFormat.cjs");
+ class OpenAIChatFunctionCallStructureGenerationModel {
+ constructor({ model, fnName, fnDescription, promptFormat, }) {
+ Object.defineProperty(this, "model", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "fnName", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "fnDescription", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "promptFormat", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.model = model;
+ this.fnName = fnName;
+ this.fnDescription = fnDescription;
+ this.promptFormat = promptFormat;
+ }
+ get modelInformation() {
+ return this.model.modelInformation;
+ }
+ get settings() {
+ return this.model.settings;
+ }
+ get settingsForEvent() {
+ return this.model.settingsForEvent;
+ }
+ /**
+ * Returns this model with a text prompt format.
+ */
+ withTextPrompt() {
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.text)());
+ }
+ /**
+ * Returns this model with an instruction prompt format.
+ */
+ withInstructionPrompt() {
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
+ }
+ /**
+ * Returns this model with a chat prompt format.
+ */
+ withChatPrompt() {
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
+ }
+ withPromptFormat(promptFormat) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this.model,
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ promptFormat,
+ });
+ }
+ withSettings(additionalSettings) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this.model.withSettings(additionalSettings),
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ promptFormat: this.promptFormat,
+ });
+ }
+ /**
+ * JSON generation uses the OpenAI GPT function calling API.
+ * It provides a single function specification and instructs the model to provide parameters for calling the function.
+ * The result is returned as parsed JSON.
+ *
+ * @see https://platform.openai.com/docs/guides/gpt/function-calling
+ */
+ async doGenerateStructure(schema, prompt, // first argument of the function
+ options) {
+ const expandedPrompt = this.promptFormat.format(prompt);
+ const response = await this.model
+ .withSettings({
+ stopSequences: [
+ ...(this.settings.stopSequences ?? []),
+ ...this.promptFormat.stopSequences,
+ ],
+ })
+ .callAPI(expandedPrompt, {
+ ...options,
+ responseFormat: OpenAIChatModel_1.OpenAIChatResponseFormat.json,
+ functionCall: { name: this.fnName },
+ functions: [
+ {
+ name: this.fnName,
+ description: this.fnDescription,
+ parameters: schema.getJsonSchema(),
+ },
+ ],
+ });
+ const valueText = response.choices[0].message.function_call.arguments;
+ try {
+ return {
+ response,
+ valueText,
+ value: secure_json_parse_1.default.parse(valueText),
+ usage: this.model.extractUsage(response),
+ };
+ }
+ catch (error) {
+ throw new StructureParseError_js_1.StructureParseError({
+ valueText,
+ cause: error,
+ });
+ }
+ }
+ async doStreamStructure(schema, prompt, // first argument of the function
+ options) {
+ const expandedPrompt = this.promptFormat.format(prompt);
+ return this.model.callAPI(expandedPrompt, {
+ ...options,
+ responseFormat: OpenAIChatModel_1.OpenAIChatResponseFormat.structureDeltaIterable,
+ functionCall: { name: this.fnName },
+ functions: [
+ {
+ name: this.fnName,
+ description: this.fnDescription,
+ parameters: schema.getJsonSchema(),
+ },
+ ],
+ });
+ }
+ }
+ exports.OpenAIChatFunctionCallStructureGenerationModel = OpenAIChatFunctionCallStructureGenerationModel;
@@ -0,0 +1,89 @@
+ import { FunctionOptions } from "../../../core/FunctionOptions.js";
+ import { JsonSchemaProducer } from "../../../core/schema/JsonSchemaProducer.js";
+ import { Schema } from "../../../core/schema/Schema.js";
+ import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
+ import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { OpenAIChatMessage } from "./OpenAIChatMessage";
+ import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel";
+ export declare class OpenAIChatFunctionCallStructureGenerationModel<PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatMessage[]>> implements StructureGenerationModel<Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ OpenAIChatSettings> {
+ readonly model: OpenAIChatModel;
+ readonly fnName: string;
+ readonly fnDescription?: string;
+ readonly promptFormat: PROMPT_FORMAT;
+ constructor({ model, fnName, fnDescription, promptFormat, }: {
+ model: OpenAIChatModel;
+ fnName: string;
+ fnDescription?: string;
+ promptFormat: PROMPT_FORMAT;
+ });
+ get modelInformation(): import("../../../index.js").ModelInformation;
+ get settings(): OpenAIChatSettings;
+ get settingsForEvent(): Partial<OpenAIChatSettings>;
+ /**
+ * Returns this model with a text prompt format.
+ */
+ withTextPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<string, OpenAIChatMessage[]>>;
+ /**
+ * Returns this model with an instruction prompt format.
+ */
+ withInstructionPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").InstructionPrompt | import("../../../index.js").TextInstructionPrompt, OpenAIChatMessage[]>>;
+ /**
+ * Returns this model with a chat prompt format.
+ */
+ withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").ChatPrompt, OpenAIChatMessage[]>>;
+ withPromptFormat<TARGET_PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatMessage[]>>(promptFormat: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallStructureGenerationModel<TARGET_PROMPT_FORMAT>;
+ withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
+ /**
+ * JSON generation uses the OpenAI GPT function calling API.
+ * It provides a single function specification and instructs the model to provide parameters for calling the function.
+ * The result is returned as parsed JSON.
+ *
+ * @see https://platform.openai.com/docs/guides/gpt/function-calling
+ */
+ doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ options?: FunctionOptions): Promise<{
+ response: {
+ object: "chat.completion";
+ usage: {
+ prompt_tokens: number;
+ total_tokens: number;
+ completion_tokens: number;
+ };
+ model: string;
+ id: string;
+ choices: {
+ message: {
+ role: "assistant";
+ content: string | null;
+ function_call?: {
+ name: string;
+ arguments: string;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ logprobs?: any;
+ finish_reason?: "length" | "stop" | "tool_calls" | "function_call" | "content_filter" | null | undefined;
+ }[];
+ created: number;
+ system_fingerprint?: string | undefined;
+ };
+ valueText: string;
+ value: any;
+ usage: {
+ promptTokens: number;
+ completionTokens: number;
+ totalTokens: number;
+ };
+ }>;
+ doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ options?: FunctionOptions): Promise<AsyncIterable<import("../../../index.js").Delta<unknown>>>;
+ }
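The new OpenAIChatFunctionCallStructureGenerationModel wraps an OpenAIChatModel together with a function name/description and a prompt format, and routes structure generation through the OpenAI function-calling API. A minimal usage sketch, assuming the constructor declared above and a `generateStructure(model, schema, prompt)` call shape; the top-level import names, the `OpenAIChatPromptFormat` helper, and the `ZodSchema` constructor are assumptions based on the file list, not confirmed by this diff:

import { z } from "zod";
import {
  generateStructure,
  OpenAIChatModel,
  OpenAIChatFunctionCallStructureGenerationModel,
  OpenAIChatPromptFormat, // assumed export name for the chat prompt format helpers
  ZodSchema,
} from "modelfusion";

// Wrap a chat model so structure generation uses functionCall: { name: fnName }.
const structureModel = new OpenAIChatFunctionCallStructureGenerationModel({
  model: new OpenAIChatModel({ model: "gpt-3.5-turbo" }),
  fnName: "extractPerson", // hypothetical function name
  fnDescription: "Extract information about a person.",
  promptFormat: OpenAIChatPromptFormat.text(), // string prompts, as in withTextPrompt()
});

// The schema is converted to JSON Schema and sent as the function parameters.
const person = await generateStructure(
  structureModel,
  new ZodSchema(z.object({ name: z.string(), age: z.number() })),
  "John Doe is 42 years old."
);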