llm-fns 1.0.19 → 1.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/createJsonSchemaLlmClient.d.ts +3 -0
- package/dist/createJsonSchemaLlmClient.js +36 -8
- package/dist/createLlmClient.d.ts +6 -0
- package/dist/createLlmClient.js +27 -2
- package/dist/createLlmRetryClient.d.ts +3 -1
- package/dist/createLlmRetryClient.js +11 -3
- package/dist/createZodLlmClient.js +10 -1
- package/package.json +2 -2
|
@@ -1,5 +1,8 @@
|
|
|
1
1
|
import OpenAI from 'openai';
|
|
2
2
|
import { PromptFunction, LlmCommonOptions } from "./createLlmClient.js";
|
|
3
|
+
export declare class SchemaValidationError extends Error {
|
|
4
|
+
constructor(message: string, options?: ErrorOptions);
|
|
5
|
+
}
|
|
3
6
|
/**
|
|
4
7
|
* Options for JSON schema prompt functions.
|
|
5
8
|
* Extends common options with JSON-specific settings.
|
|
@@ -3,9 +3,17 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
3
3
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.SchemaValidationError = void 0;
|
|
6
7
|
exports.createJsonSchemaLlmClient = createJsonSchemaLlmClient;
|
|
7
8
|
const ajv_1 = __importDefault(require("ajv"));
|
|
8
9
|
const createLlmRetryClient_js_1 = require("./createLlmRetryClient.js");
|
|
10
|
+
class SchemaValidationError extends Error {
|
|
11
|
+
constructor(message, options) {
|
|
12
|
+
super(message, options);
|
|
13
|
+
this.name = 'SchemaValidationError';
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
exports.SchemaValidationError = SchemaValidationError;
|
|
9
17
|
function createJsonSchemaLlmClient(params) {
|
|
10
18
|
const { prompt, fallbackPrompt, disableJsonFixer = false } = params;
|
|
11
19
|
const llmRetryClient = (0, createLlmRetryClient_js_1.createLlmRetryClient)({ prompt, fallbackPrompt });
|
|
@@ -63,6 +71,11 @@ ${brokenResponse}
|
|
|
63
71
|
return JSON.parse(jsonDataToParse);
|
|
64
72
|
}
|
|
65
73
|
catch (parseError) {
|
|
74
|
+
// Only attempt to fix SyntaxErrors (JSON parsing errors).
|
|
75
|
+
// Other errors (like runtime errors) should bubble up.
|
|
76
|
+
if (!(parseError instanceof SyntaxError)) {
|
|
77
|
+
throw parseError;
|
|
78
|
+
}
|
|
66
79
|
if (disableJsonFixer) {
|
|
67
80
|
throw parseError;
|
|
68
81
|
}
|
|
@@ -87,6 +100,11 @@ ${brokenResponse}
|
|
|
87
100
|
return validator(jsonData);
|
|
88
101
|
}
|
|
89
102
|
catch (validationError) {
|
|
103
|
+
// Only attempt to fix known validation errors (SchemaValidationError).
|
|
104
|
+
// Arbitrary errors thrown by custom validators (e.g. "Database Error") should bubble up.
|
|
105
|
+
if (!(validationError instanceof SchemaValidationError)) {
|
|
106
|
+
throw validationError;
|
|
107
|
+
}
|
|
90
108
|
if (disableJsonFixer) {
|
|
91
109
|
throw validationError;
|
|
92
110
|
}
|
|
@@ -142,8 +160,8 @@ ${schemaJsonString}`;
|
|
|
142
160
|
const validate = ajv.compile(schema);
|
|
143
161
|
const valid = validate(data);
|
|
144
162
|
if (!valid) {
|
|
145
|
-
const errors = validate.errors
|
|
146
|
-
throw new
|
|
163
|
+
const errors = (validate.errors || []).map(e => `${e.instancePath} ${e.message}`).join(', ');
|
|
164
|
+
throw new SchemaValidationError(`AJV Validation Error: ${errors}`);
|
|
147
165
|
}
|
|
148
166
|
return data;
|
|
149
167
|
}
|
|
@@ -159,24 +177,34 @@ ${schemaJsonString}`;
|
|
|
159
177
|
jsonData = await _parseOrFixJson(llmResponseString, schemaJsonString, options);
|
|
160
178
|
}
|
|
161
179
|
catch (parseError) {
|
|
162
|
-
|
|
180
|
+
// Only wrap SyntaxErrors (JSON parse errors) for retry.
|
|
181
|
+
if (parseError instanceof SyntaxError) {
|
|
182
|
+
const errorMessage = `Your previous response resulted in an error.
|
|
163
183
|
Error Type: JSON_PARSE_ERROR
|
|
164
184
|
Error Details: ${parseError.message}
|
|
165
185
|
The response provided was not valid JSON. Please correct it.`;
|
|
166
|
-
|
|
186
|
+
throw new createLlmRetryClient_js_1.LlmRetryError(errorMessage, 'JSON_PARSE_ERROR', undefined, llmResponseString);
|
|
187
|
+
}
|
|
188
|
+
// Rethrow other errors (e.g. fatal errors, runtime errors)
|
|
189
|
+
throw parseError;
|
|
167
190
|
}
|
|
168
191
|
try {
|
|
169
192
|
const validatedData = await _validateOrFix(jsonData, validator, schemaJsonString, options);
|
|
170
193
|
return validatedData;
|
|
171
194
|
}
|
|
172
195
|
catch (validationError) {
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
196
|
+
// Only wrap known validation errors for retry.
|
|
197
|
+
if (validationError instanceof SchemaValidationError) {
|
|
198
|
+
const rawResponseForError = JSON.stringify(jsonData, null, 2);
|
|
199
|
+
const errorDetails = validationError.message;
|
|
200
|
+
const errorMessage = `Your previous response resulted in an error.
|
|
176
201
|
Error Type: SCHEMA_VALIDATION_ERROR
|
|
177
202
|
Error Details: ${errorDetails}
|
|
178
203
|
The response was valid JSON but did not conform to the required schema. Please review the errors and the schema to provide a corrected response.`;
|
|
179
|
-
|
|
204
|
+
throw new createLlmRetryClient_js_1.LlmRetryError(errorMessage, 'CUSTOM_ERROR', validationError, rawResponseForError);
|
|
205
|
+
}
|
|
206
|
+
// Rethrow other errors
|
|
207
|
+
throw validationError;
|
|
180
208
|
}
|
|
181
209
|
};
|
|
182
210
|
const { maxRetries, useResponseFormat: _useResponseFormat, beforeValidation, validator: _validator, ...restOptions } = options || {};
|
|
@@ -1,5 +1,11 @@
|
|
|
1
1
|
import OpenAI from "openai";
|
|
2
2
|
import type PQueue from 'p-queue';
|
|
3
|
+
export declare class LlmFatalError extends Error {
|
|
4
|
+
readonly cause?: any | undefined;
|
|
5
|
+
readonly messages?: OpenAI.Chat.Completions.ChatCompletionMessageParam[] | undefined;
|
|
6
|
+
readonly rawResponse?: string | null | undefined;
|
|
7
|
+
constructor(message: string, cause?: any | undefined, messages?: OpenAI.Chat.Completions.ChatCompletionMessageParam[] | undefined, rawResponse?: string | null | undefined);
|
|
8
|
+
}
|
|
3
9
|
export declare function countChars(message: OpenAI.Chat.Completions.ChatCompletionMessageParam): number;
|
|
4
10
|
export declare function truncateSingleMessage(message: OpenAI.Chat.Completions.ChatCompletionMessageParam, charLimit: number): OpenAI.Chat.Completions.ChatCompletionMessageParam;
|
|
5
11
|
export declare function truncateMessages(messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[], limit: number): OpenAI.Chat.Completions.ChatCompletionMessageParam[];
|
package/dist/createLlmClient.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.LlmFatalError = void 0;
|
|
3
4
|
exports.countChars = countChars;
|
|
4
5
|
exports.truncateSingleMessage = truncateSingleMessage;
|
|
5
6
|
exports.truncateMessages = truncateMessages;
|
|
@@ -7,6 +8,20 @@ exports.mergeRequestOptions = mergeRequestOptions;
|
|
|
7
8
|
exports.normalizeOptions = normalizeOptions;
|
|
8
9
|
exports.createLlmClient = createLlmClient;
|
|
9
10
|
const retryUtils_js_1 = require("./retryUtils.js");
|
|
11
|
+
class LlmFatalError extends Error {
|
|
12
|
+
cause;
|
|
13
|
+
messages;
|
|
14
|
+
rawResponse;
|
|
15
|
+
constructor(message, cause, messages, rawResponse) {
|
|
16
|
+
super(message);
|
|
17
|
+
this.cause = cause;
|
|
18
|
+
this.messages = messages;
|
|
19
|
+
this.rawResponse = rawResponse;
|
|
20
|
+
this.name = 'LlmFatalError';
|
|
21
|
+
this.cause = cause;
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
exports.LlmFatalError = LlmFatalError;
|
|
10
25
|
function countChars(message) {
|
|
11
26
|
if (!message.content)
|
|
12
27
|
return 0;
|
|
@@ -222,7 +237,15 @@ function createLlmClient(params) {
|
|
|
222
237
|
const promptSummary = getPromptSummary(finalMessages);
|
|
223
238
|
const apiCall = async () => {
|
|
224
239
|
const task = () => (0, retryUtils_js_1.executeWithRetry)(async () => {
|
|
225
|
-
|
|
240
|
+
try {
|
|
241
|
+
return await openai.chat.completions.create(completionParams, requestOptions);
|
|
242
|
+
}
|
|
243
|
+
catch (error) {
|
|
244
|
+
if (error?.status === 400 || error?.status === 401 || error?.status === 403) {
|
|
245
|
+
throw new LlmFatalError(error.message || 'Fatal API Error', error, finalMessages);
|
|
246
|
+
}
|
|
247
|
+
throw error;
|
|
248
|
+
}
|
|
226
249
|
}, async (completion) => {
|
|
227
250
|
if (completion.error) {
|
|
228
251
|
return {
|
|
@@ -231,7 +254,9 @@ function createLlmClient(params) {
|
|
|
231
254
|
}
|
|
232
255
|
return { isValid: true, data: completion };
|
|
233
256
|
}, retries ?? 3, undefined, (error) => {
|
|
234
|
-
if (error
|
|
257
|
+
if (error instanceof LlmFatalError)
|
|
258
|
+
return false;
|
|
259
|
+
if (error?.status === 400 || error?.status === 401 || error?.status === 403 || error?.code === 'invalid_api_key') {
|
|
235
260
|
return false;
|
|
236
261
|
}
|
|
237
262
|
return true;
|
|
@@ -16,7 +16,9 @@ export declare class LlmRetryAttemptError extends Error {
|
|
|
16
16
|
readonly mode: 'main' | 'fallback';
|
|
17
17
|
readonly conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[];
|
|
18
18
|
readonly attemptNumber: number;
|
|
19
|
-
|
|
19
|
+
readonly error: Error;
|
|
20
|
+
readonly rawResponse?: string | null | undefined;
|
|
21
|
+
constructor(message: string, mode: 'main' | 'fallback', conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[], attemptNumber: number, error: Error, rawResponse?: string | null | undefined, options?: ErrorOptions);
|
|
20
22
|
}
|
|
21
23
|
export interface LlmRetryResponseInfo {
|
|
22
24
|
mode: 'main' | 'fallback';
|
|
@@ -32,12 +32,16 @@ class LlmRetryAttemptError extends Error {
|
|
|
32
32
|
mode;
|
|
33
33
|
conversation;
|
|
34
34
|
attemptNumber;
|
|
35
|
-
|
|
35
|
+
error;
|
|
36
|
+
rawResponse;
|
|
37
|
+
constructor(message, mode, conversation, attemptNumber, error, rawResponse, options) {
|
|
36
38
|
super(message, options);
|
|
37
39
|
this.message = message;
|
|
38
40
|
this.mode = mode;
|
|
39
41
|
this.conversation = conversation;
|
|
40
42
|
this.attemptNumber = attemptNumber;
|
|
43
|
+
this.error = error;
|
|
44
|
+
this.rawResponse = rawResponse;
|
|
41
45
|
this.name = 'LlmRetryAttemptError';
|
|
42
46
|
}
|
|
43
47
|
}
|
|
@@ -57,7 +61,7 @@ function constructLlmMessages(initialMessages, attemptNumber, previousError) {
|
|
|
57
61
|
if (!previousError) {
|
|
58
62
|
throw new Error("Invariant violation: previousError is missing for a retry attempt.");
|
|
59
63
|
}
|
|
60
|
-
const cause = previousError.
|
|
64
|
+
const cause = previousError.error;
|
|
61
65
|
if (!(cause instanceof LlmRetryError)) {
|
|
62
66
|
throw Error('cause must be an instanceof LlmRetryError');
|
|
63
67
|
}
|
|
@@ -128,12 +132,16 @@ function createLlmRetryClient(params) {
|
|
|
128
132
|
return dataToProcess;
|
|
129
133
|
}
|
|
130
134
|
catch (error) {
|
|
135
|
+
if (error instanceof createLlmClient_js_1.LlmFatalError) {
|
|
136
|
+
const fatalAttemptError = new LlmRetryAttemptError(`Fatal error on attempt ${attempt + 1}: ${error.message}`, mode, currentMessages, attempt, error, error.rawResponse, { cause: lastError });
|
|
137
|
+
throw new LlmRetryExhaustedError(`Operation failed with fatal error on attempt ${attempt + 1}.`, { cause: fatalAttemptError });
|
|
138
|
+
}
|
|
131
139
|
if (error instanceof LlmRetryError) {
|
|
132
140
|
const conversationForError = [...currentMessages];
|
|
133
141
|
if (error.rawResponse) {
|
|
134
142
|
conversationForError.push({ role: 'assistant', content: error.rawResponse });
|
|
135
143
|
}
|
|
136
|
-
lastError = new LlmRetryAttemptError(`Attempt ${attempt + 1} failed
|
|
144
|
+
lastError = new LlmRetryAttemptError(`Attempt ${attempt + 1} failed: ${error.message}`, mode, conversationForError, attempt, error, error.rawResponse, { cause: lastError });
|
|
137
145
|
}
|
|
138
146
|
else {
|
|
139
147
|
throw error;
|
|
@@ -36,6 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
36
36
|
exports.normalizeZodArgs = normalizeZodArgs;
|
|
37
37
|
exports.createZodLlmClient = createZodLlmClient;
|
|
38
38
|
const z = __importStar(require("zod"));
|
|
39
|
+
const createJsonSchemaLlmClient_js_1 = require("./createJsonSchemaLlmClient.js");
|
|
39
40
|
function isZodSchema(obj) {
|
|
40
41
|
return (typeof obj === 'object' &&
|
|
41
42
|
obj !== null &&
|
|
@@ -94,7 +95,15 @@ function createZodLlmClient(params) {
|
|
|
94
95
|
unrepresentable: 'any'
|
|
95
96
|
});
|
|
96
97
|
const zodValidator = (data) => {
|
|
97
|
-
|
|
98
|
+
try {
|
|
99
|
+
return dataExtractionSchema.parse(data);
|
|
100
|
+
}
|
|
101
|
+
catch (error) {
|
|
102
|
+
if (error instanceof z.ZodError) {
|
|
103
|
+
throw new createJsonSchemaLlmClient_js_1.SchemaValidationError(error.toString(), { cause: error });
|
|
104
|
+
}
|
|
105
|
+
throw error;
|
|
106
|
+
}
|
|
98
107
|
};
|
|
99
108
|
const result = await jsonSchemaClient.promptJson(messages, schema, {
|
|
100
109
|
...options,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "llm-fns",
|
|
3
|
-
"version": "1.0.19",
|
|
3
|
+
"version": "1.0.21",
|
|
4
4
|
"description": "",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -14,7 +14,7 @@
|
|
|
14
14
|
"ajv": "^8.17.1",
|
|
15
15
|
"openai": "^6.9.1",
|
|
16
16
|
"undici": "^7.16.0",
|
|
17
|
-
"zod": "^4.1
|
|
17
|
+
"zod": "^4.2.1"
|
|
18
18
|
},
|
|
19
19
|
"devDependencies": {
|
|
20
20
|
"@keyv/sqlite": "^4.0.6",
|