llm-fns 1.0.20 → 1.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,9 @@ import OpenAI from "openai";
 import type PQueue from 'p-queue';
 export declare class LlmFatalError extends Error {
     readonly cause?: any | undefined;
-    constructor(message: string, cause?: any | undefined);
+    readonly messages?: OpenAI.Chat.Completions.ChatCompletionMessageParam[] | undefined;
+    readonly rawResponse?: string | null | undefined;
+    constructor(message: string, cause?: any | undefined, messages?: OpenAI.Chat.Completions.ChatCompletionMessageParam[] | undefined, rawResponse?: string | null | undefined);
 }
 export declare function countChars(message: OpenAI.Chat.Completions.ChatCompletionMessageParam): number;
 export declare function truncateSingleMessage(message: OpenAI.Chat.Completions.ChatCompletionMessageParam, charLimit: number): OpenAI.Chat.Completions.ChatCompletionMessageParam;
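With these declarations, a fatal error now carries the conversation that produced it and the raw model output. A minimal consumer sketch in TypeScript; the import specifier and the runPrompt/send names are assumptions for illustration, since the diff shows the declaration file but not how the package re-exports the class:

import { LlmFatalError } from "llm-fns"; // assumed re-export path

async function runPrompt(send: () => Promise<string>): Promise<string> {
  try {
    return await send();
  } catch (err) {
    if (err instanceof LlmFatalError) {
      // New in 1.0.21: inspect what was sent and what came back.
      console.error("fatal LLM error:", err.message, {
        turns: err.messages?.length ?? 0,
        rawResponse: err.rawResponse ?? null,
      });
    }
    throw err;
  }
}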
package/dist/createLlmClient.js CHANGED
@@ -10,9 +10,13 @@ exports.createLlmClient = createLlmClient;
 const retryUtils_js_1 = require("./retryUtils.js");
 class LlmFatalError extends Error {
     cause;
-    constructor(message, cause) {
+    messages;
+    rawResponse;
+    constructor(message, cause, messages, rawResponse) {
         super(message);
         this.cause = cause;
+        this.messages = messages;
+        this.rawResponse = rawResponse;
         this.name = 'LlmFatalError';
         this.cause = cause;
     }
@@ -238,7 +242,7 @@ function createLlmClient(params) {
         }
         catch (error) {
             if (error?.status === 400 || error?.status === 401 || error?.status === 403) {
-                throw new LlmFatalError(error.message || 'Fatal API Error', error);
+                throw new LlmFatalError(error.message || 'Fatal API Error', error, finalMessages);
             }
             throw error;
         }
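The change above is why the conversation is now recoverable: non-retryable 400/401/403 responses are rethrown as LlmFatalError with the final request messages attached. A hedged sketch of the same pattern in standalone TypeScript; completeOnce and the model name are illustrative and not part of llm-fns, only the error-wrapping shape mirrors the diff:

import OpenAI from "openai";
import { LlmFatalError } from "llm-fns"; // assumed re-export path

async function completeOnce(
  client: OpenAI,
  messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
): Promise<string | null> {
  try {
    const res = await client.chat.completions.create({ model: "gpt-4o-mini", messages });
    return res.choices[0]?.message?.content ?? null;
  } catch (error: any) {
    // Auth/validation failures cannot succeed on retry; surface them with context.
    if (error?.status === 400 || error?.status === 401 || error?.status === 403) {
      throw new LlmFatalError(error.message || "Fatal API Error", error, messages);
    }
    throw error;
  }
}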
@@ -17,7 +17,8 @@ export declare class LlmRetryAttemptError extends Error {
     readonly conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[];
     readonly attemptNumber: number;
     readonly error: Error;
-    constructor(message: string, mode: 'main' | 'fallback', conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[], attemptNumber: number, error: Error, options?: ErrorOptions);
+    readonly rawResponse?: string | null | undefined;
+    constructor(message: string, mode: 'main' | 'fallback', conversation: OpenAI.Chat.Completions.ChatCompletionMessageParam[], attemptNumber: number, error: Error, rawResponse?: string | null | undefined, options?: ErrorOptions);
 }
 export interface LlmRetryResponseInfo {
     mode: 'main' | 'fallback';
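LlmRetryAttemptError gains the same rawResponse field, so each failed attempt can report what the model actually returned. An illustrative consumer; describeAttempt is a hypothetical helper and the import path is assumed, but it only touches fields shown in the declaration above:

import { LlmRetryAttemptError } from "llm-fns"; // assumed re-export path

function describeAttempt(err: LlmRetryAttemptError): string {
  const lastTurn = err.conversation[err.conversation.length - 1];
  return [
    `attempt ${err.attemptNumber} failed: ${err.message}`,
    `last conversation role: ${lastTurn?.role ?? "n/a"}`,
    `raw model output: ${err.rawResponse ?? "<none>"}`,
  ].join("\n");
}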
@@ -33,13 +33,15 @@ class LlmRetryAttemptError extends Error {
     conversation;
     attemptNumber;
     error;
-    constructor(message, mode, conversation, attemptNumber, error, options) {
+    rawResponse;
+    constructor(message, mode, conversation, attemptNumber, error, rawResponse, options) {
         super(message, options);
         this.message = message;
         this.mode = mode;
         this.conversation = conversation;
         this.attemptNumber = attemptNumber;
         this.error = error;
+        this.rawResponse = rawResponse;
         this.name = 'LlmRetryAttemptError';
     }
 }
@@ -131,7 +133,7 @@ function createLlmRetryClient(params) {
             }
             catch (error) {
                 if (error instanceof createLlmClient_js_1.LlmFatalError) {
-                    const fatalAttemptError = new LlmRetryAttemptError(`Fatal error on attempt ${attempt + 1}: ${error.message}`, mode, currentMessages, attempt, error, { cause: lastError });
+                    const fatalAttemptError = new LlmRetryAttemptError(`Fatal error on attempt ${attempt + 1}: ${error.message}`, mode, currentMessages, attempt, error, error.rawResponse, { cause: lastError });
                     throw new LlmRetryExhaustedError(`Operation failed with fatal error on attempt ${attempt + 1}.`, { cause: fatalAttemptError });
                 }
                 if (error instanceof LlmRetryError) {
@@ -139,7 +141,7 @@ function createLlmRetryClient(params) {
                     if (error.rawResponse) {
                         conversationForError.push({ role: 'assistant', content: error.rawResponse });
                     }
-                    lastError = new LlmRetryAttemptError(`Attempt ${attempt + 1} failed: ${error.message}`, mode, conversationForError, attempt, error, { cause: lastError });
+                    lastError = new LlmRetryAttemptError(`Attempt ${attempt + 1} failed: ${error.message}`, mode, conversationForError, attempt, error, error.rawResponse, { cause: lastError });
                 }
                 else {
                     throw error;
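Because createLlmRetryClient wraps a fatal error in an LlmRetryAttemptError and then throws LlmRetryExhaustedError with that attempt error as its cause, the raw model response can be recovered by walking the cause chain. A hedged sketch; the import path and extractRawResponse are assumptions, while LlmRetryAttemptError and its rawResponse field come from the diff:

import { LlmRetryAttemptError } from "llm-fns"; // assumed re-export path

// Walk err.cause until an attempt error carrying a raw model response is found.
function extractRawResponse(err: unknown): string | null {
  let current: unknown = err;
  while (current instanceof Error) {
    if (current instanceof LlmRetryAttemptError && current.rawResponse != null) {
      return current.rawResponse;
    }
    current = current.cause;
  }
  return null;
}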