llm-fns 1.0.19 → 1.0.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,8 @@
 import OpenAI from 'openai';
 import { PromptFunction, LlmCommonOptions } from "./createLlmClient.js";
+export declare class SchemaValidationError extends Error {
+    constructor(message: string, options?: ErrorOptions);
+}
 /**
  * Options for JSON schema prompt functions.
  * Extends common options with JSON-specific settings.
@@ -3,9 +3,17 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.SchemaValidationError = void 0;
 exports.createJsonSchemaLlmClient = createJsonSchemaLlmClient;
 const ajv_1 = __importDefault(require("ajv"));
 const createLlmRetryClient_js_1 = require("./createLlmRetryClient.js");
+class SchemaValidationError extends Error {
+    constructor(message, options) {
+        super(message, options);
+        this.name = 'SchemaValidationError';
+    }
+}
+exports.SchemaValidationError = SchemaValidationError;
 function createJsonSchemaLlmClient(params) {
     const { prompt, fallbackPrompt, disableJsonFixer = false } = params;
     const llmRetryClient = (0, createLlmRetryClient_js_1.createLlmRetryClient)({ prompt, fallbackPrompt });
@@ -63,6 +71,11 @@ ${brokenResponse}
             return JSON.parse(jsonDataToParse);
         }
         catch (parseError) {
+            // Only attempt to fix SyntaxErrors (JSON parsing errors).
+            // Other errors (like runtime errors) should bubble up.
+            if (!(parseError instanceof SyntaxError)) {
+                throw parseError;
+            }
             if (disableJsonFixer) {
                 throw parseError;
             }
@@ -87,6 +100,11 @@ ${brokenResponse}
             return validator(jsonData);
         }
         catch (validationError) {
+            // Only attempt to fix known validation errors (SchemaValidationError).
+            // Arbitrary errors thrown by custom validators (e.g. "Database Error") should bubble up.
+            if (!(validationError instanceof SchemaValidationError)) {
+                throw validationError;
+            }
             if (disableJsonFixer) {
                 throw validationError;
             }
@@ -142,8 +160,8 @@ ${schemaJsonString}`;
         const validate = ajv.compile(schema);
         const valid = validate(data);
         if (!valid) {
-            const errors = validate.errors?.map(e => `${e.instancePath} ${e.message}`).join(', ');
-            throw new Error(`AJV Validation Error: ${errors}`);
+            const errors = (validate.errors || []).map(e => `${e.instancePath} ${e.message}`).join(', ');
+            throw new SchemaValidationError(`AJV Validation Error: ${errors}`);
         }
         return data;
     }
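
Note: the AJV path above now raises SchemaValidationError instead of a plain Error, and guards against validate.errors being null. A rough standalone equivalent is sketched below; the import from the package root is an assumption (the diff only shows the class exported from the compiled createJsonSchemaLlmClient module), and ajvValidate is a hypothetical name.

import Ajv from 'ajv';
import { SchemaValidationError } from 'llm-fns'; // assumed root re-export

const ajv = new Ajv();

// Hypothetical helper mirroring the changed AJV check: join AJV's error list and
// signal the failure with SchemaValidationError so the fixer/retry path can react.
function ajvValidate<T>(schema: object, data: unknown): T {
    const validate = ajv.compile<T>(schema);
    if (!validate(data)) {
        const errors = (validate.errors || [])
            .map(e => `${e.instancePath} ${e.message}`)
            .join(', ');
        throw new SchemaValidationError(`AJV Validation Error: ${errors}`);
    }
    return data as T;
}
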
@@ -159,24 +177,34 @@ ${schemaJsonString}`;
             jsonData = await _parseOrFixJson(llmResponseString, schemaJsonString, options);
         }
         catch (parseError) {
-            const errorMessage = `Your previous response resulted in an error.
+            // Only wrap SyntaxErrors (JSON parse errors) for retry.
+            if (parseError instanceof SyntaxError) {
+                const errorMessage = `Your previous response resulted in an error.
 Error Type: JSON_PARSE_ERROR
 Error Details: ${parseError.message}
 The response provided was not valid JSON. Please correct it.`;
-            throw new createLlmRetryClient_js_1.LlmRetryError(errorMessage, 'JSON_PARSE_ERROR', undefined, llmResponseString);
+                throw new createLlmRetryClient_js_1.LlmRetryError(errorMessage, 'JSON_PARSE_ERROR', undefined, llmResponseString);
+            }
+            // Rethrow other errors (e.g. fatal errors, runtime errors)
+            throw parseError;
         }
         try {
             const validatedData = await _validateOrFix(jsonData, validator, schemaJsonString, options);
             return validatedData;
         }
         catch (validationError) {
-            const rawResponseForError = JSON.stringify(jsonData, null, 2);
-            const errorDetails = validationError.message;
-            const errorMessage = `Your previous response resulted in an error.
+            // Only wrap known validation errors for retry.
+            if (validationError instanceof SchemaValidationError) {
+                const rawResponseForError = JSON.stringify(jsonData, null, 2);
+                const errorDetails = validationError.message;
+                const errorMessage = `Your previous response resulted in an error.
 Error Type: SCHEMA_VALIDATION_ERROR
 Error Details: ${errorDetails}
 The response was valid JSON but did not conform to the required schema. Please review the errors and the schema to provide a corrected response.`;
-            throw new createLlmRetryClient_js_1.LlmRetryError(errorMessage, 'CUSTOM_ERROR', validationError, rawResponseForError);
+                throw new createLlmRetryClient_js_1.LlmRetryError(errorMessage, 'CUSTOM_ERROR', validationError, rawResponseForError);
+            }
+            // Rethrow other errors
+            throw validationError;
         }
     };
     const { maxRetries, useResponseFormat: _useResponseFormat, beforeValidation, validator: _validator, ...restOptions } = options || {};
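
Note: taken together, the hunks above change the contract for custom validators: throwing SchemaValidationError marks a failure as fixable/retryable, while any other error now propagates to the caller instead of being sent back to the model. A hedged sketch of a validator written against that contract (orderValidator, its field checks, and the databaseAvailable flag are invented for illustration; the root import is an assumption):

import { SchemaValidationError } from 'llm-fns'; // assumed root re-export

const databaseAvailable = false; // hypothetical availability flag for illustration

// Hypothetical custom validator for promptJson's `validator` option.
function orderValidator(data: unknown): { id: string; qty: number } {
    const record = data as { id?: unknown; qty?: unknown };
    if (typeof record?.id !== 'string' || typeof record?.qty !== 'number') {
        // Retryable: surfaces to the model as a SCHEMA_VALIDATION_ERROR prompt.
        throw new SchemaValidationError('expected { id: string, qty: number }');
    }
    if (!databaseAvailable) {
        // Not retried in 1.0.20: ordinary errors (the diff's "Database Error" case)
        // bubble up to the caller instead of being fed back to the model.
        throw new Error('Database Error');
    }
    return { id: record.id, qty: record.qty };
}
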
@@ -1,5 +1,9 @@
 import OpenAI from "openai";
 import type PQueue from 'p-queue';
+export declare class LlmFatalError extends Error {
+    readonly cause?: any | undefined;
+    constructor(message: string, cause?: any | undefined);
+}
 export declare function countChars(message: OpenAI.Chat.Completions.ChatCompletionMessageParam): number;
 export declare function truncateSingleMessage(message: OpenAI.Chat.Completions.ChatCompletionMessageParam, charLimit: number): OpenAI.Chat.Completions.ChatCompletionMessageParam;
 export declare function truncateMessages(messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[], limit: number): OpenAI.Chat.Completions.ChatCompletionMessageParam[];
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.LlmFatalError = void 0;
 exports.countChars = countChars;
 exports.truncateSingleMessage = truncateSingleMessage;
 exports.truncateMessages = truncateMessages;
@@ -7,6 +8,16 @@ exports.mergeRequestOptions = mergeRequestOptions;
 exports.normalizeOptions = normalizeOptions;
 exports.createLlmClient = createLlmClient;
 const retryUtils_js_1 = require("./retryUtils.js");
+class LlmFatalError extends Error {
+    cause;
+    constructor(message, cause) {
+        super(message);
+        this.cause = cause;
+        this.name = 'LlmFatalError';
+        this.cause = cause;
+    }
+}
+exports.LlmFatalError = LlmFatalError;
 function countChars(message) {
     if (!message.content)
         return 0;
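
Note: LlmFatalError keeps the underlying API error on cause, so a caller that catches it can still inspect details such as the HTTP status. A minimal sketch (the root import path is an assumption; apiError is a stand-in for the kind of error the OpenAI client throws on a rejected request):

import { LlmFatalError } from 'llm-fns'; // assumed root re-export

// Simulate a rejected API request carrying an HTTP status.
const apiError = Object.assign(new Error('Incorrect API key provided'), { status: 401 });
const fatal = new LlmFatalError(apiError.message, apiError);

console.log(fatal.name);           // "LlmFatalError"
console.log(fatal.cause?.status);  // 401
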
@@ -222,7 +233,15 @@ function createLlmClient(params) {
     const promptSummary = getPromptSummary(finalMessages);
     const apiCall = async () => {
         const task = () => (0, retryUtils_js_1.executeWithRetry)(async () => {
-            return openai.chat.completions.create(completionParams, requestOptions);
+            try {
+                return await openai.chat.completions.create(completionParams, requestOptions);
+            }
+            catch (error) {
+                if (error?.status === 400 || error?.status === 401 || error?.status === 403) {
+                    throw new LlmFatalError(error.message || 'Fatal API Error', error);
+                }
+                throw error;
+            }
         }, async (completion) => {
             if (completion.error) {
                 return {
@@ -231,7 +250,9 @@ function createLlmClient(params) {
             }
             return { isValid: true, data: completion };
         }, retries ?? 3, undefined, (error) => {
-            if (error?.status === 401 || error?.code === 'invalid_api_key') {
+            if (error instanceof LlmFatalError)
+                return false;
+            if (error?.status === 400 || error?.status === 401 || error?.status === 403 || error?.code === 'invalid_api_key') {
                 return false;
             }
             return true;
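
Note: the retry predicate now treats 400 and 403 like 401, and anything already classified as LlmFatalError, as non-retryable. The same rule shown standalone (isRetryable is a name chosen for this sketch, not an export of the package; the root import is an assumption):

import { LlmFatalError } from 'llm-fns'; // assumed root re-export

// Mirrors the predicate in the hunk above: fatal classifications and client-side
// HTTP errors (400/401/403, invalid_api_key) stop the retry loop; everything else retries.
function isRetryable(error: any): boolean {
    if (error instanceof LlmFatalError) {
        return false;
    }
    if (error?.status === 400 || error?.status === 401 || error?.status === 403 || error?.code === 'invalid_api_key') {
        return false;
    }
    return true;
}
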
@@ -16,7 +16,8 @@ export declare class LlmRetryAttemptError extends Error {
     readonly mode: 'main' | 'fallback';
     readonly conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[];
     readonly attemptNumber: number;
-    constructor(message: string, mode: 'main' | 'fallback', conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[], attemptNumber: number, options?: ErrorOptions);
+    readonly error: Error;
+    constructor(message: string, mode: 'main' | 'fallback', conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[], attemptNumber: number, error: Error, options?: ErrorOptions);
 }
 export interface LlmRetryResponseInfo {
     mode: 'main' | 'fallback';
@@ -32,12 +32,14 @@ class LlmRetryAttemptError extends Error {
     mode;
     conversation;
     attemptNumber;
-    constructor(message, mode, conversation, attemptNumber, options) {
+    error;
+    constructor(message, mode, conversation, attemptNumber, error, options) {
         super(message, options);
         this.message = message;
         this.mode = mode;
         this.conversation = conversation;
         this.attemptNumber = attemptNumber;
+        this.error = error;
         this.name = 'LlmRetryAttemptError';
     }
 }
@@ -57,7 +59,7 @@ function constructLlmMessages(initialMessages, attemptNumber, previousError) {
     if (!previousError) {
         throw new Error("Invariant violation: previousError is missing for a retry attempt.");
     }
-    const cause = previousError.cause;
+    const cause = previousError.error;
     if (!(cause instanceof LlmRetryError)) {
         throw Error('cause must be an instanceof LlmRetryError');
     }
@@ -128,12 +130,16 @@ function createLlmRetryClient(params) {
                 return dataToProcess;
             }
             catch (error) {
+                if (error instanceof createLlmClient_js_1.LlmFatalError) {
+                    const fatalAttemptError = new LlmRetryAttemptError(`Fatal error on attempt ${attempt + 1}: ${error.message}`, mode, currentMessages, attempt, error, { cause: lastError });
+                    throw new LlmRetryExhaustedError(`Operation failed with fatal error on attempt ${attempt + 1}.`, { cause: fatalAttemptError });
+                }
                 if (error instanceof LlmRetryError) {
                     const conversationForError = [...currentMessages];
                     if (error.rawResponse) {
                         conversationForError.push({ role: 'assistant', content: error.rawResponse });
                     }
-                    lastError = new LlmRetryAttemptError(`Attempt ${attempt + 1} failed.`, mode, conversationForError, attempt, { cause: error });
+                    lastError = new LlmRetryAttemptError(`Attempt ${attempt + 1} failed: ${error.message}`, mode, conversationForError, attempt, error, { cause: lastError });
                 }
                 else {
                     throw error;
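
Note: with this change a fatal error no longer burns the remaining retry budget: the loop throws an LlmRetryExhaustedError immediately, with the attempt error as its cause and the original LlmFatalError on that attempt's new error field. A hedged sketch of unwrapping it at the call site (retryClient, its prompt method name, and messages are placeholders, not names shown in this diff):

declare const retryClient: { prompt(messages: unknown): Promise<unknown> }; // hypothetical shape
declare const messages: unknown;

try {
    await retryClient.prompt(messages); // hypothetical call into the retry client
} catch (err: any) {
    // err is expected to be an LlmRetryExhaustedError here.
    const attempt = err?.cause;        // LlmRetryAttemptError for the fatal attempt
    const original = attempt?.error;   // the LlmFatalError (e.g. a 401/403 from the API)
    console.error(attempt?.attemptNumber, original?.message);
}
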
@@ -36,6 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.normalizeZodArgs = normalizeZodArgs;
 exports.createZodLlmClient = createZodLlmClient;
 const z = __importStar(require("zod"));
+const createJsonSchemaLlmClient_js_1 = require("./createJsonSchemaLlmClient.js");
 function isZodSchema(obj) {
     return (typeof obj === 'object' &&
         obj !== null &&
@@ -94,7 +95,15 @@ function createZodLlmClient(params) {
         unrepresentable: 'any'
     });
     const zodValidator = (data) => {
-        return dataExtractionSchema.parse(data);
+        try {
+            return dataExtractionSchema.parse(data);
+        }
+        catch (error) {
+            if (error instanceof z.ZodError) {
+                throw new createJsonSchemaLlmClient_js_1.SchemaValidationError(error.toString(), { cause: error });
+            }
+            throw error;
+        }
     };
     const result = await jsonSchemaClient.promptJson(messages, schema, {
         ...options,
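
Note: for the zod client, a failed parse is converted into the shared SchemaValidationError (with the ZodError preserved as cause), so schema mismatches keep flowing into the fixer/retry path while unrelated validator errors bubble up. The same wrapping shown standalone with zod v4 (the Order schema and the root import are illustrative assumptions):

import * as z from 'zod';
import { SchemaValidationError } from 'llm-fns'; // assumed root re-export

const Order = z.object({ id: z.string(), qty: z.number() });

// Mirrors the compiled zodValidator above: ZodError becomes SchemaValidationError,
// anything else is rethrown untouched.
function zodValidator(data: unknown) {
    try {
        return Order.parse(data);
    } catch (error) {
        if (error instanceof z.ZodError) {
            throw new SchemaValidationError(error.toString(), { cause: error });
        }
        throw error;
    }
}
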
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "llm-fns",
-  "version": "1.0.19",
+  "version": "1.0.20",
   "description": "",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -14,7 +14,7 @@
     "ajv": "^8.17.1",
     "openai": "^6.9.1",
     "undici": "^7.16.0",
-    "zod": "^4.1.13"
+    "zod": "^4.2.1"
   },
   "devDependencies": {
     "@keyv/sqlite": "^4.0.6",