modelfusion 0.41.3 → 0.43.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/README.md +55 -4
  2. package/guard/fixStructure.cjs +21 -0
  3. package/guard/fixStructure.d.ts +10 -0
  4. package/guard/fixStructure.js +17 -0
  5. package/guard/guard.cjs +69 -0
  6. package/guard/guard.d.ts +28 -0
  7. package/guard/guard.js +65 -0
  8. package/guard/index.cjs +18 -0
  9. package/guard/index.d.ts +2 -0
  10. package/guard/index.js +2 -0
  11. package/index.cjs +1 -0
  12. package/index.d.ts +1 -0
  13. package/index.js +1 -0
  14. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +15 -4
  15. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +2 -1
  16. package/model-function/generate-structure/StructureFromTextGenerationModel.js +15 -4
  17. package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -1
  18. package/model-function/generate-structure/StructureOrTextGenerationModel.d.ts +2 -0
  19. package/model-function/generate-structure/StructureParseError.cjs +34 -0
  20. package/model-function/generate-structure/StructureParseError.d.ts +10 -0
  21. package/model-function/generate-structure/StructureParseError.js +30 -0
  22. package/model-function/generate-structure/StructureValidationError.cjs +10 -3
  23. package/model-function/generate-structure/StructureValidationError.d.ts +3 -1
  24. package/model-function/generate-structure/StructureValidationError.js +10 -3
  25. package/model-function/generate-structure/generateStructure.cjs +2 -1
  26. package/model-function/generate-structure/generateStructure.js +2 -1
  27. package/model-function/generate-structure/generateStructureOrText.cjs +1 -0
  28. package/model-function/generate-structure/generateStructureOrText.js +1 -0
  29. package/model-function/generate-text/TextGenerationModel.d.ts +2 -4
  30. package/model-function/index.cjs +1 -0
  31. package/model-function/index.d.ts +1 -0
  32. package/model-function/index.js +1 -0
  33. package/model-provider/anthropic/AnthropicApiConfiguration.cjs +23 -0
  34. package/model-provider/anthropic/AnthropicApiConfiguration.d.ts +11 -0
  35. package/model-provider/anthropic/AnthropicApiConfiguration.js +19 -0
  36. package/model-provider/anthropic/AnthropicError.cjs +39 -0
  37. package/model-provider/anthropic/AnthropicError.d.ts +37 -0
  38. package/model-provider/anthropic/AnthropicError.js +31 -0
  39. package/model-provider/anthropic/AnthropicPromptFormat.cjs +66 -0
  40. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +11 -0
  41. package/model-provider/anthropic/AnthropicPromptFormat.js +61 -0
  42. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +226 -0
  43. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +102 -0
  44. package/model-provider/anthropic/AnthropicTextGenerationModel.js +219 -0
  45. package/model-provider/anthropic/index.cjs +23 -0
  46. package/model-provider/anthropic/index.d.ts +4 -0
  47. package/model-provider/anthropic/index.js +4 -0
  48. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +1 -1
  49. package/model-provider/index.cjs +1 -0
  50. package/model-provider/index.d.ts +1 -0
  51. package/model-provider/index.js +1 -0
  52. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +0 -3
  53. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +0 -1
  54. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +0 -3
  55. package/model-provider/openai/chat/OpenAIChatModel.cjs +47 -20
  56. package/model-provider/openai/chat/OpenAIChatModel.d.ts +36 -2
  57. package/model-provider/openai/chat/OpenAIChatModel.js +47 -20
  58. package/package.json +1 -1
  59. package/prompt/PromptFormatTextGenerationModel.cjs +3 -3
  60. package/prompt/PromptFormatTextGenerationModel.d.ts +1 -1
  61. package/prompt/PromptFormatTextGenerationModel.js +3 -3
package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts
@@ -0,0 +1,102 @@
+ import { z } from "zod";
+ import { FunctionOptions } from "../../core/FunctionOptions.js";
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+ import { ResponseHandler } from "../../core/api/postToApi.js";
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { Delta } from "../../model-function/Delta.js";
+ import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
+ import { PromptFormat } from "../../prompt/PromptFormat.js";
+ import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
+ export declare const ANTHROPIC_TEXT_GENERATION_MODELS: {
+     "claude-instant-1": {
+         contextWindowSize: number;
+     };
+     "claude-instant-1.2": {
+         contextWindowSize: number;
+     };
+     "claude-2": {
+         contextWindowSize: number;
+     };
+     "claude-2.0": {
+         contextWindowSize: number;
+     };
+ };
+ export type AnthropicTextGenerationModelType = keyof typeof ANTHROPIC_TEXT_GENERATION_MODELS;
+ export interface AnthropicTextGenerationModelSettings extends TextGenerationModelSettings {
+     api?: ApiConfiguration;
+     model: AnthropicTextGenerationModelType;
+     temperature?: number;
+     topP?: number;
+     topK?: number;
+     userId?: number;
+ }
+ /**
+  * Create a text generation model that calls the Anthropic API.
+  *
+  * @see https://docs.anthropic.com/claude/reference/complete_post
+  */
+ export declare class AnthropicTextGenerationModel extends AbstractModel<AnthropicTextGenerationModelSettings> implements TextStreamingModel<string, AnthropicTextGenerationModelSettings> {
+     constructor(settings: AnthropicTextGenerationModelSettings);
+     readonly provider: "anthropic";
+     get modelName(): "claude-instant-1" | "claude-instant-1.2" | "claude-2" | "claude-2.0";
+     readonly contextWindowSize: number;
+     readonly tokenizer: undefined;
+     readonly countPromptTokens: undefined;
+     callAPI<RESPONSE>(prompt: string, options: {
+         responseFormat: AnthropicTextGenerationResponseFormatType<RESPONSE>;
+     } & FunctionOptions): Promise<RESPONSE>;
+     get settingsForEvent(): Partial<AnthropicTextGenerationModelSettings>;
+     doGenerateText(prompt: string, options?: FunctionOptions): Promise<{
+         response: {
+             model: string;
+             completion: string;
+             stop_reason: string;
+         };
+         text: string;
+     }>;
+     doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
+     withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, AnthropicTextGenerationModelSettings, this>;
+     withSettings(additionalSettings: Partial<AnthropicTextGenerationModelSettings>): this;
+ }
+ declare const anthropicTextGenerationResponseSchema: z.ZodObject<{
+     completion: z.ZodString;
+     stop_reason: z.ZodString;
+     model: z.ZodString;
+ }, "strip", z.ZodTypeAny, {
+     model: string;
+     completion: string;
+     stop_reason: string;
+ }, {
+     model: string;
+     completion: string;
+     stop_reason: string;
+ }>;
+ export type AnthropicTextGenerationResponse = z.infer<typeof anthropicTextGenerationResponseSchema>;
+ export type AnthropicTextGenerationResponseFormatType<T> = {
+     stream: boolean;
+     handler: ResponseHandler<T>;
+ };
+ export declare const AnthropicTextGenerationResponseFormat: {
+     /**
+      * Returns the response as a JSON object.
+      */
+     json: {
+         stream: false;
+         handler: ResponseHandler<{
+             model: string;
+             completion: string;
+             stop_reason: string;
+         }>;
+     };
+     /**
+      * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
+      * of the response stream.
+      */
+     deltaIterable: {
+         stream: true;
+         handler: ({ response }: {
+             response: Response;
+         }) => Promise<AsyncIterable<Delta<string>>>;
+     };
+ };
+ export {};
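The declarations above are the whole public surface of the new provider. A minimal usage sketch (not part of this diff): it assumes the package-root re-exports shown further down and that AnthropicApiConfiguration picks up the API key from the environment (e.g. an ANTHROPIC_API_KEY variable), which is not shown here.

```ts
import { AnthropicTextGenerationModel, generateText } from "modelfusion";

// Settings come from AnthropicTextGenerationModelSettings above;
// maxCompletionTokens and stopSequences are inherited from TextGenerationModelSettings.
const model = new AnthropicTextGenerationModel({
  model: "claude-instant-1",
  temperature: 0.7,
  maxCompletionTokens: 500,
});

// generateText is modelfusion's generic helper; under the hood doGenerateText
// returns the raw /complete response and the extracted `completion` text.
const text = await generateText(
  model,
  "\n\nHuman: Write a haiku about TypeScript.\n\nAssistant:"
);
console.log(text);
```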
package/model-provider/anthropic/AnthropicTextGenerationModel.js
@@ -0,0 +1,219 @@
+ import SecureJSON from "secure-json-parse";
+ import { z } from "zod";
+ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
+ import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+ import { AsyncQueue } from "../../event-source/AsyncQueue.js";
+ import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
+ import { AnthropicApiConfiguration } from "./AnthropicApiConfiguration.js";
+ import { failedAnthropicCallResponseHandler } from "./AnthropicError.js";
+ export const ANTHROPIC_TEXT_GENERATION_MODELS = {
+     "claude-instant-1": {
+         contextWindowSize: 100000,
+     },
+     "claude-instant-1.2": {
+         contextWindowSize: 100000,
+     },
+     "claude-2": {
+         contextWindowSize: 100000,
+     },
+     "claude-2.0": {
+         contextWindowSize: 100000,
+     },
+ };
+ /**
+  * Create a text generation model that calls the Anthropic API.
+  *
+  * @see https://docs.anthropic.com/claude/reference/complete_post
+  */
+ export class AnthropicTextGenerationModel extends AbstractModel {
+     constructor(settings) {
+         super({ settings });
+         Object.defineProperty(this, "provider", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: "anthropic"
+         });
+         Object.defineProperty(this, "contextWindowSize", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "tokenizer", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: undefined
+         });
+         Object.defineProperty(this, "countPromptTokens", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: undefined
+         });
+         this.contextWindowSize =
+             ANTHROPIC_TEXT_GENERATION_MODELS[this.settings.model].contextWindowSize;
+     }
+     get modelName() {
+         return this.settings.model;
+     }
+     async callAPI(prompt, options) {
+         return callWithRetryAndThrottle({
+             retry: this.settings.api?.retry,
+             throttle: this.settings.api?.throttle,
+             call: async () => callAnthropicTextGenerationAPI({
+                 ...this.settings,
+                 stopSequences: this.settings.stopSequences,
+                 maxTokens: this.settings.maxCompletionTokens,
+                 abortSignal: options.run?.abortSignal,
+                 responseFormat: options.responseFormat,
+                 prompt,
+             }),
+         });
+     }
+     get settingsForEvent() {
+         const eventSettingProperties = [
+             "maxCompletionTokens",
+             "stopSequences",
+             "temperature",
+             "topK",
+             "topP",
+             "userId",
+         ];
+         return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
+     }
+     async doGenerateText(prompt, options) {
+         const response = await this.callAPI(prompt, {
+             ...options,
+             responseFormat: AnthropicTextGenerationResponseFormat.json,
+         });
+         return {
+             response,
+             text: response.completion,
+         };
+     }
+     doStreamText(prompt, options) {
+         return this.callAPI(prompt, {
+             ...options,
+             responseFormat: AnthropicTextGenerationResponseFormat.deltaIterable,
+         });
+     }
+     withPromptFormat(promptFormat) {
+         return new PromptFormatTextStreamingModel({
+             model: this.withSettings({
+                 stopSequences: [
+                     ...(this.settings.stopSequences ?? []),
+                     ...promptFormat.stopSequences,
+                 ],
+             }),
+             promptFormat,
+         });
+     }
+     withSettings(additionalSettings) {
+         return new AnthropicTextGenerationModel(Object.assign({}, this.settings, additionalSettings));
+     }
+ }
+ const anthropicTextGenerationResponseSchema = z.object({
+     completion: z.string(),
+     stop_reason: z.string(),
+     model: z.string(),
+ });
+ async function callAnthropicTextGenerationAPI({ api = new AnthropicApiConfiguration(), abortSignal, responseFormat, model, prompt, maxTokens, stopSequences, temperature, topK, topP, userId, }) {
+     return postJsonToApi({
+         url: api.assembleUrl(`/complete`),
+         headers: api.headers,
+         body: {
+             model,
+             prompt,
+             stream: responseFormat.stream,
+             max_tokens_to_sample: maxTokens,
+             temperature,
+             top_k: topK,
+             top_p: topP,
+             stop_sequences: stopSequences,
+             metadata: userId != null ? { user_id: userId } : undefined,
+         },
+         failedResponseHandler: failedAnthropicCallResponseHandler,
+         successfulResponseHandler: responseFormat.handler,
+         abortSignal,
+     });
+ }
+ const anthropicTextStreamingResponseSchema = z.object({
+     completion: z.string(),
+     stop_reason: z.string().nullable(),
+     model: z.string(),
+ });
+ async function createAnthropicFullDeltaIterableQueue(stream) {
+     const queue = new AsyncQueue();
+     let content = "";
+     // process the stream asynchonously (no 'await' on purpose):
+     parseEventSourceStream({ stream })
+         .then(async (events) => {
+         try {
+             for await (const event of events) {
+                 if (event.event === "error") {
+                     queue.push({ type: "error", error: event.data });
+                     queue.close();
+                     return;
+                 }
+                 if (event.event !== "completion") {
+                     continue;
+                 }
+                 const data = event.data;
+                 const json = SecureJSON.parse(data);
+                 const parseResult = anthropicTextStreamingResponseSchema.safeParse(json);
+                 if (!parseResult.success) {
+                     queue.push({
+                         type: "error",
+                         error: parseResult.error,
+                     });
+                     queue.close();
+                     return;
+                 }
+                 const eventData = parseResult.data;
+                 content += eventData.completion;
+                 queue.push({
+                     type: "delta",
+                     fullDelta: {
+                         content,
+                         isComplete: eventData.stop_reason != null,
+                         delta: eventData.completion,
+                     },
+                     valueDelta: eventData.completion,
+                 });
+                 if (eventData.stop_reason != null) {
+                     queue.close();
+                 }
+             }
+         }
+         catch (error) {
+             queue.push({ type: "error", error });
+             queue.close();
+         }
+     })
+         .catch((error) => {
+         queue.push({ type: "error", error });
+         queue.close();
+     });
+     return queue;
+ }
+ export const AnthropicTextGenerationResponseFormat = {
+     /**
+      * Returns the response as a JSON object.
+      */
+     json: {
+         stream: false,
+         handler: createJsonResponseHandler(anthropicTextGenerationResponseSchema),
+     },
+     /**
+      * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
+      * of the response stream.
+      */
+     deltaIterable: {
+         stream: true,
+         handler: async ({ response }) => createAnthropicFullDeltaIterableQueue(response.body),
+     },
+ };
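Streaming goes through the deltaIterable format above: createAnthropicFullDeltaIterableQueue parses the server-sent `completion` events, accumulates the text, and closes the queue once a stop_reason arrives. A hedged sketch of consuming that stream via modelfusion's streamText helper (the helper itself is not part of this diff):

```ts
import { AnthropicTextGenerationModel, streamText } from "modelfusion";

const model = new AnthropicTextGenerationModel({ model: "claude-instant-1" });

// doStreamText returns an AsyncIterable<Delta<string>>; streamText exposes it
// as a plain async iterable of text fragments.
const textStream = await streamText(
  model,
  "\n\nHuman: Explain server-sent events in two sentences.\n\nAssistant:"
);

for await (const textFragment of textStream) {
  process.stdout.write(textFragment);
}
```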
package/model-provider/anthropic/index.cjs
@@ -0,0 +1,23 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+         desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
+     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.anthropicErrorDataSchema = exports.AnthropicError = void 0;
+ __exportStar(require("./AnthropicApiConfiguration.cjs"), exports);
+ var AnthropicError_js_1 = require("./AnthropicError.cjs");
+ Object.defineProperty(exports, "AnthropicError", { enumerable: true, get: function () { return AnthropicError_js_1.AnthropicError; } });
+ Object.defineProperty(exports, "anthropicErrorDataSchema", { enumerable: true, get: function () { return AnthropicError_js_1.anthropicErrorDataSchema; } });
+ __exportStar(require("./AnthropicPromptFormat.cjs"), exports);
+ __exportStar(require("./AnthropicTextGenerationModel.cjs"), exports);
package/model-provider/anthropic/index.d.ts
@@ -0,0 +1,4 @@
+ export * from "./AnthropicApiConfiguration.js";
+ export { AnthropicError, anthropicErrorDataSchema } from "./AnthropicError.js";
+ export * from "./AnthropicPromptFormat.js";
+ export * from "./AnthropicTextGenerationModel.js";
package/model-provider/anthropic/index.js
@@ -0,0 +1,4 @@
+ export * from "./AnthropicApiConfiguration.js";
+ export { AnthropicError, anthropicErrorDataSchema } from "./AnthropicError.js";
+ export * from "./AnthropicPromptFormat.js";
+ export * from "./AnthropicTextGenerationModel.js";
package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts CHANGED
@@ -44,9 +44,9 @@ export declare class HuggingFaceTextGenerationModel extends AbstractModel<Huggin
      get modelName(): string;
      readonly contextWindowSize: undefined;
      readonly tokenizer: undefined;
+     readonly countPromptTokens: undefined;
      callAPI(prompt: string, options?: FunctionOptions): Promise<HuggingFaceTextGenerationResponse>;
      get settingsForEvent(): Partial<HuggingFaceTextGenerationModelSettings>;
-     readonly countPromptTokens: undefined;
      doGenerateText(prompt: string, options?: FunctionOptions): Promise<{
          response: {
              generated_text: string;
package/model-provider/index.cjs CHANGED
@@ -14,6 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
      for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
+ __exportStar(require("./anthropic/index.cjs"), exports);
  __exportStar(require("./automatic1111/index.cjs"), exports);
  __exportStar(require("./cohere/index.cjs"), exports);
  __exportStar(require("./elevenlabs/index.cjs"), exports);
package/model-provider/index.d.ts CHANGED
@@ -1,3 +1,4 @@
+ export * from "./anthropic/index.js";
  export * from "./automatic1111/index.js";
  export * from "./cohere/index.js";
  export * from "./elevenlabs/index.js";
package/model-provider/index.js CHANGED
@@ -1,3 +1,4 @@
+ export * from "./anthropic/index.js";
  export * from "./automatic1111/index.js";
  export * from "./cohere/index.js";
  export * from "./elevenlabs/index.js";
package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs CHANGED
@@ -102,9 +102,6 @@ class LlamaCppTextGenerationModel extends AbstractModel_js_1.AbstractModel {
              responseFormat: exports.LlamaCppTextGenerationResponseFormat.deltaIterable,
          });
      }
-     extractTextDelta(fullDelta) {
-         return fullDelta.delta;
-     }
      withPromptFormat(promptFormat) {
          return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
              model: this.withSettings({
package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts CHANGED
@@ -100,7 +100,6 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
          };
      }>;
      doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
-     extractTextDelta(fullDelta: LlamaCppTextGenerationDelta): string | undefined;
      withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
      withSettings(additionalSettings: Partial<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
  }
package/model-provider/llamacpp/LlamaCppTextGenerationModel.js CHANGED
@@ -96,9 +96,6 @@ export class LlamaCppTextGenerationModel extends AbstractModel {
              responseFormat: LlamaCppTextGenerationResponseFormat.deltaIterable,
          });
      }
-     extractTextDelta(fullDelta) {
-         return fullDelta.delta;
-     }
      withPromptFormat(promptFormat) {
          return new PromptFormatTextStreamingModel({
              model: this.withSettings({
package/model-provider/openai/chat/OpenAIChatModel.cjs CHANGED
@@ -9,6 +9,7 @@ const zod_1 = __importDefault(require("zod"));
  const callWithRetryAndThrottle_js_1 = require("../../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../../core/api/postToApi.cjs");
  const AbstractModel_js_1 = require("../../../model-function/AbstractModel.cjs");
+ const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
  const parsePartialJson_js_1 = require("../../../model-function/generate-structure/parsePartialJson.cjs");
  const PromptFormatTextStreamingModel_js_1 = require("../../../prompt/PromptFormatTextStreamingModel.cjs");
  const OpenAIApiConfiguration_js_1 = require("../OpenAIApiConfiguration.cjs");
@@ -256,11 +257,22 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
                  },
              ],
          });
-         return {
-             response,
-             structure: secure_json_parse_1.default.parse(response.choices[0].message.function_call.arguments),
-             usage: this.extractUsage(response),
-         };
+         const valueText = response.choices[0].message.function_call.arguments;
+         try {
+             return {
+                 response,
+                 valueText,
+                 value: secure_json_parse_1.default.parse(valueText),
+                 usage: this.extractUsage(response),
+             };
+         }
+         catch (error) {
+             throw new StructureParseError_js_1.StructureParseError({
+                 structureName: structureDefinition.name,
+                 valueText: valueText,
+                 cause: error,
+             });
+         }
      }
      async doStreamStructure(structureDefinition, prompt, options) {
          return this.callAPI(prompt, {
@@ -290,22 +302,37 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
          const message = response.choices[0].message;
          const content = message.content;
          const functionCall = message.function_call;
-         const structureAndText = functionCall == null
-             ? {
-                 structure: null,
-                 value: null,
-                 text: content ?? "",
-             }
-             : {
-                 structure: functionCall.name,
-                 value: secure_json_parse_1.default.parse(functionCall.arguments),
-                 text: content,
+         if (functionCall == null) {
+             return {
+                 response,
+                 structureAndText: {
+                     structure: null,
+                     value: null,
+                     valueText: null,
+                     text: content ?? "",
+                 },
+                 usage: this.extractUsage(response),
              };
-         return {
-             response,
-             structureAndText,
-             usage: this.extractUsage(response),
-         };
+         }
+         try {
+             return {
+                 response,
+                 structureAndText: {
+                     structure: functionCall.name,
+                     value: secure_json_parse_1.default.parse(functionCall.arguments),
+                     valueText: functionCall.arguments,
+                     text: content,
+                 },
+                 usage: this.extractUsage(response),
+             };
+         }
+         catch (error) {
+             throw new StructureParseError_js_1.StructureParseError({
+                 structureName: functionCall.name,
+                 valueText: functionCall.arguments,
+                 cause: error,
+             });
+         }
      }
      extractUsage(response) {
          return {
package/model-provider/openai/chat/OpenAIChatModel.d.ts CHANGED
@@ -220,7 +220,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
              logprobs?: any;
          }[];
      };
-     structure: any;
+     valueText: string;
+     value: any;
      usage: {
          promptTokens: number;
          completionTokens: number;
@@ -256,10 +257,43 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
      structureAndText: {
          structure: null;
          value: null;
+         valueText: null;
          text: string;
-     } | {
+     };
+     usage: {
+         promptTokens: number;
+         completionTokens: number;
+         totalTokens: number;
+     };
+ } | {
+     response: {
+         object: "chat.completion";
+         model: string;
+         usage: {
+             prompt_tokens: number;
+             completion_tokens: number;
+             total_tokens: number;
+         };
+         id: string;
+         created: number;
+         choices: {
+             message: {
+                 content: string | null;
+                 role: "assistant";
+                 function_call?: {
+                     name: string;
+                     arguments: string;
+                 } | undefined;
+             };
+             finish_reason: string;
+             index: number;
+             logprobs?: any;
+         }[];
+     };
+     structureAndText: {
          structure: string;
          value: any;
+         valueText: string;
          text: string | null;
      };
      usage: {
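The widened union above is the visible effect of the change: doGenerateStructureOrText now reports the raw function-call arguments as valueText next to the parsed value. A small sketch of handling that result shape; how the result is obtained is left out, since the calling API is not part of this hunk.

```ts
// Shape taken from the declaration above; obtaining `result` is omitted.
type StructureOrTextResult =
  | { structure: null; value: null; valueText: null; text: string }
  | { structure: string; value: any; valueText: string; text: string | null };

function handle(result: StructureOrTextResult) {
  if (result.structure === null) {
    // The model answered with plain text instead of calling a function.
    console.log("text answer:", result.text);
  } else {
    // `value` is the parsed function-call arguments, `valueText` the raw JSON string.
    console.log(`structure ${result.structure}:`, result.value);
    console.log("raw arguments:", result.valueText);
  }
}
```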
package/model-provider/openai/chat/OpenAIChatModel.js CHANGED
@@ -3,6 +3,7 @@ import z from "zod";
  import { callWithRetryAndThrottle } from "../../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../../core/api/postToApi.js";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
+ import { StructureParseError } from "../../../model-function/generate-structure/StructureParseError.js";
  import { parsePartialJson } from "../../../model-function/generate-structure/parsePartialJson.js";
  import { PromptFormatTextStreamingModel } from "../../../prompt/PromptFormatTextStreamingModel.js";
  import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
@@ -247,11 +248,22 @@ export class OpenAIChatModel extends AbstractModel {
                  },
              ],
          });
-         return {
-             response,
-             structure: SecureJSON.parse(response.choices[0].message.function_call.arguments),
-             usage: this.extractUsage(response),
-         };
+         const valueText = response.choices[0].message.function_call.arguments;
+         try {
+             return {
+                 response,
+                 valueText,
+                 value: SecureJSON.parse(valueText),
+                 usage: this.extractUsage(response),
+             };
+         }
+         catch (error) {
+             throw new StructureParseError({
+                 structureName: structureDefinition.name,
+                 valueText: valueText,
+                 cause: error,
+             });
+         }
      }
      async doStreamStructure(structureDefinition, prompt, options) {
          return this.callAPI(prompt, {
@@ -281,22 +293,37 @@ export class OpenAIChatModel extends AbstractModel {
          const message = response.choices[0].message;
          const content = message.content;
          const functionCall = message.function_call;
-         const structureAndText = functionCall == null
-             ? {
-                 structure: null,
-                 value: null,
-                 text: content ?? "",
-             }
-             : {
-                 structure: functionCall.name,
-                 value: SecureJSON.parse(functionCall.arguments),
-                 text: content,
+         if (functionCall == null) {
+             return {
+                 response,
+                 structureAndText: {
+                     structure: null,
+                     value: null,
+                     valueText: null,
+                     text: content ?? "",
+                 },
+                 usage: this.extractUsage(response),
              };
-         return {
-             response,
-             structureAndText,
-             usage: this.extractUsage(response),
-         };
+         }
+         try {
+             return {
+                 response,
+                 structureAndText: {
+                     structure: functionCall.name,
+                     value: SecureJSON.parse(functionCall.arguments),
+                     valueText: functionCall.arguments,
+                     text: content,
+                 },
+                 usage: this.extractUsage(response),
+             };
+         }
+         catch (error) {
+             throw new StructureParseError({
+                 structureName: functionCall.name,
+                 valueText: functionCall.arguments,
+                 cause: error,
+             });
+         }
      }
      extractUsage(response) {
          return {
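Both the CommonJS and ESM builds now wrap the SecureJSON.parse calls and throw StructureParseError, constructed with structureName, valueText, and cause, instead of letting raw parse errors escape. A hedged sketch of catching it: it assumes the error class is re-exported from the package root (the one-line additions to model-function/index in the file list suggest this) and that it exposes the fields it was constructed with.

```ts
import { StructureParseError } from "modelfusion";

// Wraps any structure-generation call and reports parse failures in a readable way.
// The structureName / valueText accessors are assumed to mirror the constructor
// arguments shown in the diff above.
async function withStructureErrorReporting<T>(run: () => Promise<T>): Promise<T> {
  try {
    return await run();
  } catch (error) {
    if (error instanceof StructureParseError) {
      console.error(`Could not parse structure "${error.structureName}":`);
      console.error(error.valueText); // raw, unparsable function-call arguments
    }
    throw error;
  }
}

// Usage: wrap the actual generateStructure(...) call, whose exact signature
// is not shown in this diff:
// const value = await withStructureErrorReporting(() =>
//   generateStructure(/* model, structure definition, prompt */)
// );
```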
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "modelfusion",
    "description": "Build multimodal applications, chatbots, and agents with JavaScript and TypeScript.",
-   "version": "0.41.3",
+   "version": "0.43.0",
    "author": "Lars Grammel",
    "license": "MIT",
    "keywords": [