modelfusion 0.80.0 → 0.82.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/README.md +55 -44
  2. package/core/schema/UncheckedSchema.d.ts +2 -1
  3. package/core/schema/ZodSchema.cjs +5 -1
  4. package/core/schema/ZodSchema.d.ts +3 -1
  5. package/core/schema/ZodSchema.js +3 -0
  6. package/core/schema/index.cjs +0 -3
  7. package/core/schema/index.d.ts +0 -3
  8. package/core/schema/index.js +0 -3
  9. package/guard/fixStructure.cjs +14 -8
  10. package/guard/fixStructure.d.ts +14 -8
  11. package/guard/fixStructure.js +14 -8
  12. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +2 -3
  13. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +8 -10
  14. package/model-function/generate-structure/StructureFromTextGenerationModel.js +2 -3
  15. package/model-function/generate-structure/StructureFromTextPromptFormat.d.ts +6 -0
  16. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +69 -0
  17. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +22 -0
  18. package/model-function/generate-structure/StructureFromTextStreamingModel.js +65 -0
  19. package/model-function/generate-structure/StructureGenerationModel.d.ts +4 -3
  20. package/model-function/generate-structure/StructureParseError.cjs +2 -10
  21. package/model-function/generate-structure/StructureParseError.d.ts +1 -4
  22. package/model-function/generate-structure/StructureParseError.js +2 -10
  23. package/model-function/generate-structure/StructureValidationError.cjs +2 -10
  24. package/model-function/generate-structure/StructureValidationError.d.ts +1 -4
  25. package/model-function/generate-structure/StructureValidationError.js +2 -10
  26. package/model-function/generate-structure/generateStructure.cjs +4 -5
  27. package/model-function/generate-structure/generateStructure.d.ts +14 -20
  28. package/model-function/generate-structure/generateStructure.js +4 -5
  29. package/model-function/generate-structure/index.cjs +3 -0
  30. package/model-function/generate-structure/index.d.ts +3 -0
  31. package/model-function/generate-structure/index.js +3 -0
  32. package/model-function/generate-structure/jsonStructurePrompt.cjs +11 -0
  33. package/model-function/generate-structure/jsonStructurePrompt.d.ts +4 -0
  34. package/model-function/generate-structure/jsonStructurePrompt.js +7 -0
  35. package/model-function/generate-structure/streamStructure.cjs +4 -4
  36. package/model-function/generate-structure/streamStructure.d.ts +18 -26
  37. package/model-function/generate-structure/streamStructure.js +4 -4
  38. package/model-function/generate-text/PromptFormatTextGenerationModel.cjs +7 -0
  39. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +3 -0
  40. package/model-function/generate-text/PromptFormatTextGenerationModel.js +7 -0
  41. package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +7 -0
  42. package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +3 -0
  43. package/model-function/generate-text/PromptFormatTextStreamingModel.js +7 -0
  44. package/model-function/generate-text/prompt-format/ChatPrompt.d.ts +4 -4
  45. package/model-function/generate-text/prompt-format/Content.d.ts +4 -4
  46. package/model-function/generate-text/prompt-format/InstructionPrompt.d.ts +4 -4
  47. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +147 -0
  48. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +89 -0
  49. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +140 -0
  50. package/model-provider/openai/chat/OpenAIChatModel.cjs +16 -56
  51. package/model-provider/openai/chat/OpenAIChatModel.d.ts +9 -53
  52. package/model-provider/openai/chat/OpenAIChatModel.js +17 -54
  53. package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +8 -1
  54. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +4 -0
  55. package/model-provider/openai/chat/OpenAIChatPromptFormat.js +6 -0
  56. package/package.json +1 -1
  57. package/tool/generate-tool-calls-or-text/generateToolCallsOrText.d.ts +2 -2
  58. package/core/schema/StructureDefinition.d.ts +0 -7
  59. package/core/schema/UncheckedStructureDefinition.cjs +0 -30
  60. package/core/schema/UncheckedStructureDefinition.d.ts +0 -12
  61. package/core/schema/UncheckedStructureDefinition.js +0 -26
  62. package/core/schema/ZodStructureDefinition.cjs +0 -30
  63. package/core/schema/ZodStructureDefinition.d.ts +0 -13
  64. package/core/schema/ZodStructureDefinition.js +0 -26
  65. /package/{core/schema/StructureDefinition.cjs → model-function/generate-structure/StructureFromTextPromptFormat.cjs} +0 -0
  66. /package/{core/schema/StructureDefinition.js → model-function/generate-structure/StructureFromTextPromptFormat.js} +0 -0
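Taken together, these files replace the old StructureDefinition-based structure generation on OpenAIChatModel: the chat model no longer implements StructureGenerationModel itself, the new OpenAIChatFunctionCallStructureGenerationModel (created via asFunctionCallStructureGenerationModel) handles the OpenAI function-calling path, and asStructureGenerationModel wraps a prompt format in the new StructureFromTextStreamingModel. A minimal usage sketch in TypeScript follows. It is based only on the signatures visible in this diff; the top-level exports, the new ZodSchema(...) wrapper, and the generateStructure(model, schema, prompt) argument order are assumptions, not verified documentation.

import { z } from "zod";
// Assumed top-level exports from "modelfusion" 0.82.0; verify against the package index.
import { generateStructure, OpenAIChatModel, ZodSchema } from "modelfusion";

const sentiment = await generateStructure(
  new OpenAIChatModel({ model: "gpt-3.5-turbo", temperature: 0 })
    // New in this release range: derive a structure generation model that uses the
    // OpenAI function calling API (a single function spec with a forced function_call).
    .asFunctionCallStructureGenerationModel({
      fnName: "sentiment",                   // illustrative name
      fnDescription: "Write the sentiment.", // optional, illustrative
    })
    // Prompt formats are chainable: withTextPrompt / withInstructionPrompt / withChatPrompt.
    .withTextPrompt(),
  // Plain schemas replace the removed ZodStructureDefinition / UncheckedStructureDefinition.
  new ZodSchema(
    z.object({ sentiment: z.enum(["positive", "neutral", "negative"]) })
  ),
  "The food was amazing, but the service was slow."
);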
package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs
@@ -0,0 +1,147 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OpenAIChatFunctionCallStructureGenerationModel = void 0;
+ const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
+ const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
+ const OpenAIChatModel_1 = require("./OpenAIChatModel");
+ const OpenAIChatPromptFormat_js_1 = require("./OpenAIChatPromptFormat.cjs");
+ class OpenAIChatFunctionCallStructureGenerationModel {
+ constructor({ model, fnName, fnDescription, promptFormat, }) {
+ Object.defineProperty(this, "model", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "fnName", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "fnDescription", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "promptFormat", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.model = model;
+ this.fnName = fnName;
+ this.fnDescription = fnDescription;
+ this.promptFormat = promptFormat;
+ }
+ get modelInformation() {
+ return this.model.modelInformation;
+ }
+ get settings() {
+ return this.model.settings;
+ }
+ get settingsForEvent() {
+ return this.model.settingsForEvent;
+ }
+ /**
+ * Returns this model with a text prompt format.
+ */
+ withTextPrompt() {
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.text)());
+ }
+ /**
+ * Returns this model with an instruction prompt format.
+ */
+ withInstructionPrompt() {
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
+ }
+ /**
+ * Returns this model with a chat prompt format.
+ */
+ withChatPrompt() {
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
+ }
+ withPromptFormat(promptFormat) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this.model,
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ promptFormat,
+ });
+ }
+ withSettings(additionalSettings) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this.model.withSettings(additionalSettings),
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ promptFormat: this.promptFormat,
+ });
+ }
+ /**
+ * JSON generation uses the OpenAI GPT function calling API.
+ * It provides a single function specification and instructs the model to provide parameters for calling the function.
+ * The result is returned as parsed JSON.
+ *
+ * @see https://platform.openai.com/docs/guides/gpt/function-calling
+ */
+ async doGenerateStructure(schema, prompt, // first argument of the function
+ options) {
+ const expandedPrompt = this.promptFormat.format(prompt);
+ const response = await this.model
+ .withSettings({
+ stopSequences: [
+ ...(this.settings.stopSequences ?? []),
+ ...this.promptFormat.stopSequences,
+ ],
+ })
+ .callAPI(expandedPrompt, {
+ ...options,
+ responseFormat: OpenAIChatModel_1.OpenAIChatResponseFormat.json,
+ functionCall: { name: this.fnName },
+ functions: [
+ {
+ name: this.fnName,
+ description: this.fnDescription,
+ parameters: schema.getJsonSchema(),
+ },
+ ],
+ });
+ const valueText = response.choices[0].message.function_call.arguments;
+ try {
+ return {
+ response,
+ valueText,
+ value: secure_json_parse_1.default.parse(valueText),
+ usage: this.model.extractUsage(response),
+ };
+ }
+ catch (error) {
+ throw new StructureParseError_js_1.StructureParseError({
+ valueText,
+ cause: error,
+ });
+ }
+ }
+ async doStreamStructure(schema, prompt, // first argument of the function
+ options) {
+ const expandedPrompt = this.promptFormat.format(prompt);
+ return this.model.callAPI(expandedPrompt, {
+ ...options,
+ responseFormat: OpenAIChatModel_1.OpenAIChatResponseFormat.structureDeltaIterable,
+ functionCall: { name: this.fnName },
+ functions: [
+ {
+ name: this.fnName,
+ description: this.fnDescription,
+ parameters: schema.getJsonSchema(),
+ },
+ ],
+ });
+ }
+ }
+ exports.OpenAIChatFunctionCallStructureGenerationModel = OpenAIChatFunctionCallStructureGenerationModel;
package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts
@@ -0,0 +1,89 @@
+ import { FunctionOptions } from "../../../core/FunctionOptions.js";
+ import { JsonSchemaProducer } from "../../../core/schema/JsonSchemaProducer.js";
+ import { Schema } from "../../../core/schema/Schema.js";
+ import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
+ import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { OpenAIChatMessage } from "./OpenAIChatMessage";
+ import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel";
+ export declare class OpenAIChatFunctionCallStructureGenerationModel<PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatMessage[]>> implements StructureGenerationModel<Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ OpenAIChatSettings> {
+ readonly model: OpenAIChatModel;
+ readonly fnName: string;
+ readonly fnDescription?: string;
+ readonly promptFormat: PROMPT_FORMAT;
+ constructor({ model, fnName, fnDescription, promptFormat, }: {
+ model: OpenAIChatModel;
+ fnName: string;
+ fnDescription?: string;
+ promptFormat: PROMPT_FORMAT;
+ });
+ get modelInformation(): import("../../../index.js").ModelInformation;
+ get settings(): OpenAIChatSettings;
+ get settingsForEvent(): Partial<OpenAIChatSettings>;
+ /**
+ * Returns this model with a text prompt format.
+ */
+ withTextPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<string, OpenAIChatMessage[]>>;
+ /**
+ * Returns this model with an instruction prompt format.
+ */
+ withInstructionPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").InstructionPrompt | import("../../../index.js").TextInstructionPrompt, OpenAIChatMessage[]>>;
+ /**
+ * Returns this model with a chat prompt format.
+ */
+ withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").ChatPrompt, OpenAIChatMessage[]>>;
+ withPromptFormat<TARGET_PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatMessage[]>>(promptFormat: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallStructureGenerationModel<TARGET_PROMPT_FORMAT>;
+ withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
+ /**
+ * JSON generation uses the OpenAI GPT function calling API.
+ * It provides a single function specification and instructs the model to provide parameters for calling the function.
+ * The result is returned as parsed JSON.
+ *
+ * @see https://platform.openai.com/docs/guides/gpt/function-calling
+ */
+ doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ options?: FunctionOptions): Promise<{
+ response: {
+ object: "chat.completion";
+ usage: {
+ prompt_tokens: number;
+ total_tokens: number;
+ completion_tokens: number;
+ };
+ model: string;
+ id: string;
+ choices: {
+ message: {
+ role: "assistant";
+ content: string | null;
+ function_call?: {
+ name: string;
+ arguments: string;
+ } | undefined;
+ tool_calls?: {
+ function: {
+ name: string;
+ arguments: string;
+ };
+ type: "function";
+ id: string;
+ }[] | undefined;
+ };
+ index: number;
+ logprobs?: any;
+ finish_reason?: "length" | "stop" | "tool_calls" | "function_call" | "content_filter" | null | undefined;
+ }[];
+ created: number;
+ system_fingerprint?: string | undefined;
+ };
+ valueText: string;
+ value: any;
+ usage: {
+ promptTokens: number;
+ completionTokens: number;
+ totalTokens: number;
+ };
+ }>;
+ doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ options?: FunctionOptions): Promise<AsyncIterable<import("../../../index.js").Delta<unknown>>>;
+ }
package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js
@@ -0,0 +1,140 @@
+ import SecureJSON from "secure-json-parse";
+ import { StructureParseError } from "../../../model-function/generate-structure/StructureParseError.js";
+ import { OpenAIChatResponseFormat, } from "./OpenAIChatModel";
+ import { chat, instruction, text } from "./OpenAIChatPromptFormat.js";
+ export class OpenAIChatFunctionCallStructureGenerationModel {
+ constructor({ model, fnName, fnDescription, promptFormat, }) {
+ Object.defineProperty(this, "model", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "fnName", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "fnDescription", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "promptFormat", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.model = model;
+ this.fnName = fnName;
+ this.fnDescription = fnDescription;
+ this.promptFormat = promptFormat;
+ }
+ get modelInformation() {
+ return this.model.modelInformation;
+ }
+ get settings() {
+ return this.model.settings;
+ }
+ get settingsForEvent() {
+ return this.model.settingsForEvent;
+ }
+ /**
+ * Returns this model with a text prompt format.
+ */
+ withTextPrompt() {
+ return this.withPromptFormat(text());
+ }
+ /**
+ * Returns this model with an instruction prompt format.
+ */
+ withInstructionPrompt() {
+ return this.withPromptFormat(instruction());
+ }
+ /**
+ * Returns this model with a chat prompt format.
+ */
+ withChatPrompt() {
+ return this.withPromptFormat(chat());
+ }
+ withPromptFormat(promptFormat) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this.model,
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ promptFormat,
+ });
+ }
+ withSettings(additionalSettings) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this.model.withSettings(additionalSettings),
+ fnName: this.fnName,
+ fnDescription: this.fnDescription,
+ promptFormat: this.promptFormat,
+ });
+ }
+ /**
+ * JSON generation uses the OpenAI GPT function calling API.
+ * It provides a single function specification and instructs the model to provide parameters for calling the function.
+ * The result is returned as parsed JSON.
+ *
+ * @see https://platform.openai.com/docs/guides/gpt/function-calling
+ */
+ async doGenerateStructure(schema, prompt, // first argument of the function
+ options) {
+ const expandedPrompt = this.promptFormat.format(prompt);
+ const response = await this.model
+ .withSettings({
+ stopSequences: [
+ ...(this.settings.stopSequences ?? []),
+ ...this.promptFormat.stopSequences,
+ ],
+ })
+ .callAPI(expandedPrompt, {
+ ...options,
+ responseFormat: OpenAIChatResponseFormat.json,
+ functionCall: { name: this.fnName },
+ functions: [
+ {
+ name: this.fnName,
+ description: this.fnDescription,
+ parameters: schema.getJsonSchema(),
+ },
+ ],
+ });
+ const valueText = response.choices[0].message.function_call.arguments;
+ try {
+ return {
+ response,
+ valueText,
+ value: SecureJSON.parse(valueText),
+ usage: this.model.extractUsage(response),
+ };
+ }
+ catch (error) {
+ throw new StructureParseError({
+ valueText,
+ cause: error,
+ });
+ }
+ }
+ async doStreamStructure(schema, prompt, // first argument of the function
+ options) {
+ const expandedPrompt = this.promptFormat.format(prompt);
+ return this.model.callAPI(expandedPrompt, {
+ ...options,
+ responseFormat: OpenAIChatResponseFormat.structureDeltaIterable,
+ functionCall: { name: this.fnName },
+ functions: [
+ {
+ name: this.fnName,
+ description: this.fnDescription,
+ parameters: schema.getJsonSchema(),
+ },
+ ],
+ });
+ }
+ }
package/model-provider/openai/chat/OpenAIChatModel.cjs
@@ -1,21 +1,18 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAIChatResponseFormat = exports.OpenAIChatModel = exports.calculateOpenAIChatCostInMillicents = exports.isOpenAIChatModel = exports.getOpenAIChatModelInformation = exports.OPENAI_CHAT_MODELS = void 0;
- const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
  const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../../core/api/postToApi.cjs");
  const parseJSON_js_1 = require("../../../core/schema/parseJSON.cjs");
  const AbstractModel_js_1 = require("../../../model-function/AbstractModel.cjs");
- const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
+ const StructureFromTextStreamingModel_js_1 = require("../../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
  const parsePartialJson_js_1 = require("../../../model-function/generate-structure/parsePartialJson.cjs");
  const PromptFormatTextStreamingModel_js_1 = require("../../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
  const OpenAIApiConfiguration_js_1 = require("../OpenAIApiConfiguration.cjs");
  const OpenAIError_js_1 = require("../OpenAIError.cjs");
  const TikTokenTokenizer_js_1 = require("../TikTokenTokenizer.cjs");
+ const OpenAIChatFunctionCallStructureGenerationModel_js_1 = require("./OpenAIChatFunctionCallStructureGenerationModel.cjs");
  const OpenAIChatPromptFormat_js_1 = require("./OpenAIChatPromptFormat.cjs");
  const OpenAIChatStreamIterable_js_1 = require("./OpenAIChatStreamIterable.cjs");
  const countOpenAIChatMessageTokens_js_1 = require("./countOpenAIChatMessageTokens.cjs");
@@ -264,57 +261,6 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  responseFormat: exports.OpenAIChatResponseFormat.textDeltaIterable,
  });
  }
- /**
- * JSON generation uses the OpenAI GPT function calling API.
- * It provides a single function specification and instructs the model to provide parameters for calling the function.
- * The result is returned as parsed JSON.
- *
- * @see https://platform.openai.com/docs/guides/gpt/function-calling
- */
- async doGenerateStructure(structureDefinition, prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
- responseFormat: exports.OpenAIChatResponseFormat.json,
- functionCall: { name: structureDefinition.name },
- functions: [
- {
- name: structureDefinition.name,
- description: structureDefinition.description,
- parameters: structureDefinition.schema.getJsonSchema(),
- },
- ],
- });
- const valueText = response.choices[0].message.function_call.arguments;
- try {
- return {
- response,
- valueText,
- value: secure_json_parse_1.default.parse(valueText),
- usage: this.extractUsage(response),
- };
- }
- catch (error) {
- throw new StructureParseError_js_1.StructureParseError({
- structureName: structureDefinition.name,
- valueText,
- cause: error,
- });
- }
- }
- async doStreamStructure(structureDefinition, prompt, options) {
- return this.callAPI(prompt, {
- ...options,
- responseFormat: exports.OpenAIChatResponseFormat.structureDeltaIterable,
- functionCall: { name: structureDefinition.name },
- functions: [
- {
- name: structureDefinition.name,
- description: structureDefinition.description,
- parameters: structureDefinition.schema.getJsonSchema(),
- },
- ],
- });
- }
  async doGenerateToolCall(tool, prompt, options) {
  const response = await this.callAPI(prompt, {
  ...options,
@@ -379,6 +325,20 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  totalTokens: response.usage.total_tokens,
  };
  }
+ asFunctionCallStructureGenerationModel({ fnName, fnDescription, }) {
+ return new OpenAIChatFunctionCallStructureGenerationModel_js_1.OpenAIChatFunctionCallStructureGenerationModel({
+ model: this,
+ fnName,
+ fnDescription,
+ promptFormat: (0, OpenAIChatPromptFormat_js_1.identity)(),
+ });
+ }
+ asStructureGenerationModel(promptFormat) {
+ return new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
+ model: this,
+ format: promptFormat,
+ });
+ }
  /**
  * Returns this model with a text prompt format.
  */
package/model-provider/openai/chat/OpenAIChatModel.d.ts
@@ -2,10 +2,10 @@ import { z } from "zod";
  import { FunctionOptions } from "../../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../../core/api/postToApi.js";
- import { StructureDefinition } from "../../../core/schema/StructureDefinition.js";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
  import { Delta } from "../../../model-function/Delta.js";
- import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
+ import { StructureFromTextPromptFormat } from "../../../model-function/generate-structure/StructureFromTextPromptFormat.js";
+ import { StructureFromTextStreamingModel } from "../../../model-function/generate-structure/StructureFromTextStreamingModel.js";
  import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
  import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
  import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
@@ -13,6 +13,7 @@ import { ToolDefinition } from "../../../tool/ToolDefinition.js";
  import { ToolCallGenerationModel } from "../../../tool/generate-tool-call/ToolCallGenerationModel.js";
  import { ToolCallsOrTextGenerationModel } from "../../../tool/generate-tool-calls-or-text/ToolCallsOrTextGenerationModel.js";
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
+ import { OpenAIChatFunctionCallStructureGenerationModel } from "./OpenAIChatFunctionCallStructureGenerationModel.js";
  import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
  export declare const OPENAI_CHAT_MODELS: {
  "gpt-4": {
@@ -168,7 +169,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, Omit<Op
  * ),
  * ]);
  */
- export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallsOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
+ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, ToolCallsOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
  constructor(settings: OpenAIChatSettings);
  readonly provider: "openai";
  get modelName(): OpenAIChatModelType;
@@ -230,56 +231,6 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
  };
  }>;
  doStreamText(prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
- /**
- * JSON generation uses the OpenAI GPT function calling API.
- * It provides a single function specification and instructs the model to provide parameters for calling the function.
- * The result is returned as parsed JSON.
- *
- * @see https://platform.openai.com/docs/guides/gpt/function-calling
- */
- doGenerateStructure(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
- response: {
- object: "chat.completion";
- usage: {
- prompt_tokens: number;
- total_tokens: number;
- completion_tokens: number;
- };
- model: string;
- id: string;
- choices: {
- message: {
- role: "assistant";
- content: string | null;
- function_call?: {
- name: string;
- arguments: string;
- } | undefined;
- tool_calls?: {
- function: {
- name: string;
- arguments: string;
- };
- type: "function";
- id: string;
- }[] | undefined;
- };
- index: number;
- logprobs?: any;
- finish_reason?: "length" | "stop" | "tool_calls" | "function_call" | "content_filter" | null | undefined;
- }[];
- created: number;
- system_fingerprint?: string | undefined;
- };
- valueText: string;
- value: any;
- usage: {
- promptTokens: number;
- completionTokens: number;
- totalTokens: number;
- };
- }>;
- doStreamStructure(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<AsyncIterable<Delta<unknown>>>;
  doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
  response: {
  object: "chat.completion";
@@ -375,6 +326,11 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
  completionTokens: number;
  totalTokens: number;
  };
+ asFunctionCallStructureGenerationModel({ fnName, fnDescription, }: {
+ fnName: string;
+ fnDescription?: string;
+ }): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<OpenAIChatMessage[], OpenAIChatMessage[]>>;
+ asStructureGenerationModel<INPUT_PROMPT>(promptFormat: StructureFromTextPromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatMessage[], this>;
  /**
  * Returns this model with a text prompt format.
  */
package/model-provider/openai/chat/OpenAIChatModel.js
@@ -1,16 +1,16 @@
- import SecureJSON from "secure-json-parse";
  import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../../core/api/postToApi.js";
  import { parseJSON } from "../../../core/schema/parseJSON.js";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
- import { StructureParseError } from "../../../model-function/generate-structure/StructureParseError.js";
+ import { StructureFromTextStreamingModel } from "../../../model-function/generate-structure/StructureFromTextStreamingModel.js";
  import { parsePartialJson } from "../../../model-function/generate-structure/parsePartialJson.js";
  import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
  import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
  import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
- import { chat, instruction, text } from "./OpenAIChatPromptFormat.js";
+ import { OpenAIChatFunctionCallStructureGenerationModel } from "./OpenAIChatFunctionCallStructureGenerationModel.js";
+ import { chat, identity, instruction, text } from "./OpenAIChatPromptFormat.js";
  import { createOpenAIChatDeltaIterableQueue } from "./OpenAIChatStreamIterable.js";
  import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
  /*
@@ -255,57 +255,6 @@ export class OpenAIChatModel extends AbstractModel {
  responseFormat: OpenAIChatResponseFormat.textDeltaIterable,
  });
  }
- /**
- * JSON generation uses the OpenAI GPT function calling API.
- * It provides a single function specification and instructs the model to provide parameters for calling the function.
- * The result is returned as parsed JSON.
- *
- * @see https://platform.openai.com/docs/guides/gpt/function-calling
- */
- async doGenerateStructure(structureDefinition, prompt, options) {
- const response = await this.callAPI(prompt, {
- ...options,
- responseFormat: OpenAIChatResponseFormat.json,
- functionCall: { name: structureDefinition.name },
- functions: [
- {
- name: structureDefinition.name,
- description: structureDefinition.description,
- parameters: structureDefinition.schema.getJsonSchema(),
- },
- ],
- });
- const valueText = response.choices[0].message.function_call.arguments;
- try {
- return {
- response,
- valueText,
- value: SecureJSON.parse(valueText),
- usage: this.extractUsage(response),
- };
- }
- catch (error) {
- throw new StructureParseError({
- structureName: structureDefinition.name,
- valueText,
- cause: error,
- });
- }
- }
- async doStreamStructure(structureDefinition, prompt, options) {
- return this.callAPI(prompt, {
- ...options,
- responseFormat: OpenAIChatResponseFormat.structureDeltaIterable,
- functionCall: { name: structureDefinition.name },
- functions: [
- {
- name: structureDefinition.name,
- description: structureDefinition.description,
- parameters: structureDefinition.schema.getJsonSchema(),
- },
- ],
- });
- }
  async doGenerateToolCall(tool, prompt, options) {
  const response = await this.callAPI(prompt, {
  ...options,
@@ -370,6 +319,20 @@ export class OpenAIChatModel extends AbstractModel {
  totalTokens: response.usage.total_tokens,
  };
  }
+ asFunctionCallStructureGenerationModel({ fnName, fnDescription, }) {
+ return new OpenAIChatFunctionCallStructureGenerationModel({
+ model: this,
+ fnName,
+ fnDescription,
+ promptFormat: identity(),
+ });
+ }
+ asStructureGenerationModel(promptFormat) {
+ return new StructureFromTextStreamingModel({
+ model: this,
+ format: promptFormat,
+ });
+ }
  /**
  * Returns this model with a text prompt format.
  */
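
The removed doGenerateStructure/doStreamStructure methods live on in the new model class, which also implements doStreamStructure and therefore plugs into streamStructure for partial-structure streaming. A hedged sketch under the same assumptions as above (top-level exports, argument order, TextInstructionPrompt shape); the exact type of the streamed parts is defined in streamStructure.d.ts rather than in this diff, so the loop only logs them:

import { z } from "zod";
// Assumed top-level exports from "modelfusion" 0.82.0.
import { streamStructure, OpenAIChatModel, ZodSchema } from "modelfusion";

const structureStream = await streamStructure(
  new OpenAIChatModel({ model: "gpt-3.5-turbo" })
    .asFunctionCallStructureGenerationModel({ fnName: "generateCharacters" }) // fnDescription is optional
    .withInstructionPrompt(),
  new ZodSchema(
    z.object({
      characters: z.array(
        z.object({ name: z.string(), class: z.string(), description: z.string() })
      ),
    })
  ),
  // Instruction prompt shape ({ instruction, system? }) is an assumption based on the .d.ts names.
  { instruction: "Generate 3 fantasy RPG character descriptions." }
);

for await (const part of structureStream) {
  // Each part carries a progressively more complete structure; its exact shape
  // (e.g. an isComplete flag) is not shown in this diff.
  console.log(part);
}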