@jaypie/llm 1.2.5 → 1.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/Llm.d.ts +15 -3
- package/dist/cjs/constants.d.ts +47 -47
- package/dist/cjs/index.cjs +172 -65
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.ts +1 -1
- package/dist/cjs/operate/adapters/OpenAiAdapter.d.ts +1 -1
- package/dist/cjs/tools/Toolkit.class.d.ts +1 -1
- package/dist/cjs/types/LlmProvider.interface.d.ts +21 -0
- package/dist/esm/Llm.d.ts +15 -3
- package/dist/esm/constants.d.ts +47 -47
- package/dist/esm/index.d.ts +1 -1
- package/dist/esm/index.js +157 -50
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/operate/adapters/OpenAiAdapter.d.ts +1 -1
- package/dist/esm/tools/Toolkit.class.d.ts +1 -1
- package/dist/esm/types/LlmProvider.interface.d.ts +21 -0
- package/package.json +5 -5
package/dist/cjs/index.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
export { default as Llm } from "./Llm.js";
|
|
2
2
|
export * as LLM from "./constants.js";
|
|
3
|
-
export type { LlmHistory, LlmInputContent, LlmInputContentFile, LlmInputContentImage, LlmInputContentText, LlmInputMessage, LlmMessageOptions, LlmOperateInput, LlmOperateInputContent, LlmOperateInputFile, LlmOperateInputImage, LlmOperateOptions, LlmOperateResponse, LlmOptions, LlmProvider, } from "./types/LlmProvider.interface.js";
|
|
3
|
+
export type { LlmFallbackConfig, LlmHistory, LlmInputContent, LlmInputContentFile, LlmInputContentImage, LlmInputContentText, LlmInputMessage, LlmMessageOptions, LlmOperateInput, LlmOperateInputContent, LlmOperateInputFile, LlmOperateInputImage, LlmOperateOptions, LlmOperateResponse, LlmOptions, LlmProvider, } from "./types/LlmProvider.interface.js";
|
|
4
4
|
export { LlmMessageRole, LlmMessageType, } from "./types/LlmProvider.interface.js";
|
|
5
5
|
export { isLlmOperateInput, isLlmOperateInputContent, isLlmOperateInputFile, isLlmOperateInputImage, } from "./types/LlmOperateInput.guards.js";
|
|
6
6
|
export type { LlmTool } from "./types/LlmTool.interface.js";
|
|
@@ -12,7 +12,7 @@ import { BaseProviderAdapter } from "./ProviderAdapter.interface.js";
|
|
|
12
12
|
*/
|
|
13
13
|
export declare class OpenAiAdapter extends BaseProviderAdapter {
|
|
14
14
|
readonly name: "openai";
|
|
15
|
-
readonly defaultModel: "gpt-5.1";
|
|
15
|
+
readonly defaultModel: "gpt-5.2";
|
|
16
16
|
buildRequest(request: OperateRequest): unknown;
|
|
17
17
|
formatTools(toolkit: Toolkit, _outputSchema?: JsonObject): ProviderToolDefinition[];
|
|
18
18
|
formatOutputSchema(schema: JsonObject | NaturalSchema | z.ZodType): JsonObject;
|
|
@@ -144,6 +144,18 @@ interface LlmItemReference {
|
|
|
144
144
|
}
|
|
145
145
|
export type LlmHistoryItem = LlmInputMessage | LlmItemReference | LlmOutputItem | LlmToolResult;
|
|
146
146
|
export type LlmHistory = LlmHistoryItem[];
|
|
147
|
+
/**
|
|
148
|
+
* Configuration for a fallback provider.
|
|
149
|
+
* Used when the primary provider fails with an unrecoverable error.
|
|
150
|
+
*/
|
|
151
|
+
export interface LlmFallbackConfig {
|
|
152
|
+
/** Provider name (e.g., "openai", "anthropic", "gemini") */
|
|
153
|
+
provider: string;
|
|
154
|
+
/** Model to use with this provider (optional, uses provider default if not specified) */
|
|
155
|
+
model?: string;
|
|
156
|
+
/** API key for this provider (optional, uses environment variable if not specified) */
|
|
157
|
+
apiKey?: string;
|
|
158
|
+
}
|
|
147
159
|
export interface LlmMessageOptions {
|
|
148
160
|
data?: NaturalMap;
|
|
149
161
|
model?: string;
|
|
@@ -157,6 +169,8 @@ export interface LlmMessageOptions {
|
|
|
157
169
|
export interface LlmOperateOptions {
|
|
158
170
|
data?: NaturalMap;
|
|
159
171
|
explain?: boolean;
|
|
172
|
+
/** Chain of fallback providers to try if primary fails. Set to false to disable instance-level fallback. */
|
|
173
|
+
fallback?: LlmFallbackConfig[] | false;
|
|
160
174
|
format?: JsonObject | NaturalSchema | z.ZodType;
|
|
161
175
|
history?: LlmHistory;
|
|
162
176
|
hooks?: {
|
|
@@ -215,6 +229,8 @@ export interface LlmOperateOptions {
|
|
|
215
229
|
}
|
|
216
230
|
export interface LlmOptions {
|
|
217
231
|
apiKey?: string;
|
|
232
|
+
/** Chain of fallback providers to try if primary fails */
|
|
233
|
+
fallback?: LlmFallbackConfig[];
|
|
218
234
|
model?: string;
|
|
219
235
|
}
|
|
220
236
|
export interface LlmUsageItem {
|
|
@@ -229,9 +245,14 @@ export type LlmUsage = LlmUsageItem[];
|
|
|
229
245
|
export interface LlmOperateResponse {
|
|
230
246
|
content?: string | JsonObject;
|
|
231
247
|
error?: LlmError;
|
|
248
|
+
/** Number of providers attempted (1 = primary only, >1 = fallback(s) used) */
|
|
249
|
+
fallbackAttempts?: number;
|
|
250
|
+
/** Whether a fallback provider was used instead of the primary */
|
|
251
|
+
fallbackUsed?: boolean;
|
|
232
252
|
history: LlmHistory;
|
|
233
253
|
model?: string;
|
|
234
254
|
output: LlmOutput;
|
|
255
|
+
/** Which provider actually handled the request */
|
|
235
256
|
provider?: string;
|
|
236
257
|
reasoning: string[];
|
|
237
258
|
responses: JsonReturn[];
|
package/dist/esm/Llm.d.ts
CHANGED
|
@@ -1,14 +1,25 @@
|
|
|
1
1
|
import { JsonObject } from "@jaypie/types";
|
|
2
2
|
import { LlmProviderName } from "./constants.js";
|
|
3
|
-
import { LlmHistory, LlmInputMessage, LlmMessageOptions, LlmOperateInput, LlmOperateOptions, LlmOperateResponse, LlmOptions, LlmProvider } from "./types/LlmProvider.interface.js";
|
|
3
|
+
import { LlmFallbackConfig, LlmHistory, LlmInputMessage, LlmMessageOptions, LlmOperateInput, LlmOperateOptions, LlmOperateResponse, LlmOptions, LlmProvider } from "./types/LlmProvider.interface.js";
|
|
4
4
|
import { LlmStreamChunk } from "./types/LlmStreamChunk.interface.js";
|
|
5
5
|
declare class Llm implements LlmProvider {
|
|
6
|
-
private
|
|
6
|
+
private _fallbackConfig?;
|
|
7
7
|
private _llm;
|
|
8
8
|
private _options;
|
|
9
|
+
private _provider;
|
|
9
10
|
constructor(providerName?: LlmProviderName | string, options?: LlmOptions);
|
|
10
11
|
private createProvider;
|
|
11
12
|
send(message: string, options?: LlmMessageOptions): Promise<string | JsonObject>;
|
|
13
|
+
/**
|
|
14
|
+
* Resolves the fallback chain from instance config and per-call options.
|
|
15
|
+
* Per-call options take precedence over instance config.
|
|
16
|
+
* Returns empty array if fallback is disabled.
|
|
17
|
+
*/
|
|
18
|
+
private resolveFallbackChain;
|
|
19
|
+
/**
|
|
20
|
+
* Creates a fallback Llm instance lazily when needed.
|
|
21
|
+
*/
|
|
22
|
+
private createFallbackInstance;
|
|
12
23
|
operate(input: string | LlmHistory | LlmInputMessage | LlmOperateInput, options?: LlmOperateOptions): Promise<LlmOperateResponse>;
|
|
13
24
|
stream(input: string | LlmHistory | LlmInputMessage | LlmOperateInput, options?: LlmOperateOptions): AsyncIterable<LlmStreamChunk>;
|
|
14
25
|
static send(message: string, options?: LlmMessageOptions & {
|
|
@@ -17,8 +28,9 @@ declare class Llm implements LlmProvider {
|
|
|
17
28
|
model?: string;
|
|
18
29
|
}): Promise<string | JsonObject>;
|
|
19
30
|
static operate(input: string | LlmHistory | LlmInputMessage | LlmOperateInput, options?: LlmOperateOptions & {
|
|
20
|
-
llm?: LlmProviderName;
|
|
21
31
|
apiKey?: string;
|
|
32
|
+
fallback?: LlmFallbackConfig[] | false;
|
|
33
|
+
llm?: LlmProviderName;
|
|
22
34
|
model?: string;
|
|
23
35
|
}): Promise<LlmOperateResponse>;
|
|
24
36
|
static stream(input: string | LlmHistory | LlmInputMessage | LlmOperateInput, options?: LlmOperateOptions & {
|
package/dist/esm/constants.d.ts
CHANGED
|
@@ -1,19 +1,28 @@
|
|
|
1
1
|
export declare const PROVIDER: {
|
|
2
|
-
readonly
|
|
2
|
+
readonly ANTHROPIC: {
|
|
3
|
+
readonly MAX_TOKENS: {
|
|
4
|
+
readonly DEFAULT: 4096;
|
|
5
|
+
};
|
|
3
6
|
readonly MODEL: {
|
|
4
|
-
readonly DEFAULT: "
|
|
5
|
-
readonly
|
|
6
|
-
readonly
|
|
7
|
-
readonly TINY: "
|
|
7
|
+
readonly DEFAULT: "claude-sonnet-4-5";
|
|
8
|
+
readonly LARGE: "claude-opus-4-5";
|
|
9
|
+
readonly SMALL: "claude-sonnet-4-5";
|
|
10
|
+
readonly TINY: "claude-haiku-4-5";
|
|
11
|
+
};
|
|
12
|
+
readonly MODEL_MATCH_WORDS: readonly ["anthropic", "claude", "haiku", "opus", "sonnet"];
|
|
13
|
+
readonly NAME: "anthropic";
|
|
14
|
+
readonly PROMPT: {
|
|
15
|
+
readonly AI: "\n\nAssistant:";
|
|
16
|
+
readonly HUMAN: "\n\nHuman:";
|
|
8
17
|
};
|
|
9
|
-
readonly MODEL_MATCH_WORDS: readonly ["openrouter"];
|
|
10
|
-
readonly NAME: "openrouter";
|
|
11
18
|
readonly ROLE: {
|
|
12
19
|
readonly ASSISTANT: "assistant";
|
|
13
20
|
readonly SYSTEM: "system";
|
|
14
|
-
readonly TOOL: "tool";
|
|
15
21
|
readonly USER: "user";
|
|
16
22
|
};
|
|
23
|
+
readonly TOOLS: {
|
|
24
|
+
readonly SCHEMA_VERSION: "v2";
|
|
25
|
+
};
|
|
17
26
|
};
|
|
18
27
|
readonly GEMINI: {
|
|
19
28
|
readonly MODEL: {
|
|
@@ -29,65 +38,56 @@ export declare const PROVIDER: {
|
|
|
29
38
|
readonly USER: "user";
|
|
30
39
|
};
|
|
31
40
|
};
|
|
32
|
-
readonly
|
|
41
|
+
readonly OPENAI: {
|
|
33
42
|
readonly MODEL: {
|
|
34
|
-
readonly DEFAULT: "
|
|
35
|
-
readonly LARGE: "
|
|
36
|
-
readonly SMALL: "
|
|
37
|
-
readonly TINY: "
|
|
43
|
+
readonly DEFAULT: "gpt-5.2";
|
|
44
|
+
readonly LARGE: "gpt-5.2-pro";
|
|
45
|
+
readonly SMALL: "gpt-5-mini";
|
|
46
|
+
readonly TINY: "gpt-5-nano";
|
|
38
47
|
};
|
|
39
|
-
readonly MODEL_MATCH_WORDS: readonly ["
|
|
40
|
-
readonly NAME: "
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
48
|
+
readonly MODEL_MATCH_WORDS: readonly ["openai", "gpt", RegExp];
|
|
49
|
+
readonly NAME: "openai";
|
|
50
|
+
};
|
|
51
|
+
readonly OPENROUTER: {
|
|
52
|
+
readonly MODEL: {
|
|
53
|
+
readonly DEFAULT: "z-ai/glm-4.7";
|
|
54
|
+
readonly LARGE: "z-ai/glm-4.7";
|
|
55
|
+
readonly SMALL: "z-ai/glm-4.7";
|
|
56
|
+
readonly TINY: "z-ai/glm-4.7";
|
|
44
57
|
};
|
|
58
|
+
readonly MODEL_MATCH_WORDS: readonly ["openrouter"];
|
|
59
|
+
readonly NAME: "openrouter";
|
|
45
60
|
readonly ROLE: {
|
|
46
61
|
readonly ASSISTANT: "assistant";
|
|
47
62
|
readonly SYSTEM: "system";
|
|
63
|
+
readonly TOOL: "tool";
|
|
48
64
|
readonly USER: "user";
|
|
49
65
|
};
|
|
50
|
-
readonly MAX_TOKENS: {
|
|
51
|
-
readonly DEFAULT: 4096;
|
|
52
|
-
};
|
|
53
|
-
readonly TOOLS: {
|
|
54
|
-
readonly SCHEMA_VERSION: "v2";
|
|
55
|
-
};
|
|
56
|
-
};
|
|
57
|
-
readonly OPENAI: {
|
|
58
|
-
readonly MODEL: {
|
|
59
|
-
readonly DEFAULT: "gpt-5.1";
|
|
60
|
-
readonly LARGE: "gpt-5.1";
|
|
61
|
-
readonly SMALL: "gpt-5.1-mini";
|
|
62
|
-
readonly TINY: "gpt-5.1-nano";
|
|
63
|
-
};
|
|
64
|
-
readonly MODEL_MATCH_WORDS: readonly ["openai", "gpt", RegExp];
|
|
65
|
-
readonly NAME: "openai";
|
|
66
66
|
};
|
|
67
67
|
};
|
|
68
68
|
export type LlmProviderName = typeof PROVIDER.ANTHROPIC.NAME | typeof PROVIDER.GEMINI.NAME | typeof PROVIDER.OPENAI.NAME | typeof PROVIDER.OPENROUTER.NAME;
|
|
69
69
|
export declare const DEFAULT: {
|
|
70
70
|
readonly MODEL: {
|
|
71
|
-
readonly BASE: "gpt-5.1";
|
|
72
|
-
readonly LARGE: "gpt-5.1";
|
|
73
|
-
readonly SMALL: "gpt-5.1-mini";
|
|
74
|
-
readonly TINY: "gpt-5.1-nano";
|
|
71
|
+
readonly BASE: "gpt-5.2";
|
|
72
|
+
readonly LARGE: "gpt-5.2-pro";
|
|
73
|
+
readonly SMALL: "gpt-5-mini";
|
|
74
|
+
readonly TINY: "gpt-5-nano";
|
|
75
75
|
};
|
|
76
76
|
readonly PROVIDER: {
|
|
77
77
|
readonly MODEL: {
|
|
78
|
-
readonly DEFAULT: "gpt-5.1";
|
|
79
|
-
readonly LARGE: "gpt-5.1";
|
|
80
|
-
readonly SMALL: "gpt-5.1-mini";
|
|
81
|
-
readonly TINY: "gpt-5.1-nano";
|
|
78
|
+
readonly DEFAULT: "gpt-5.2";
|
|
79
|
+
readonly LARGE: "gpt-5.2-pro";
|
|
80
|
+
readonly SMALL: "gpt-5-mini";
|
|
81
|
+
readonly TINY: "gpt-5-nano";
|
|
82
82
|
};
|
|
83
83
|
readonly MODEL_MATCH_WORDS: readonly ["openai", "gpt", RegExp];
|
|
84
84
|
readonly NAME: "openai";
|
|
85
85
|
};
|
|
86
86
|
};
|
|
87
87
|
export declare const ALL: {
|
|
88
|
-
readonly BASE: readonly ["claude-sonnet-4-5", "gemini-3-pro-preview", "gpt-5.1"];
|
|
89
|
-
readonly COMBINED: readonly ["claude-sonnet-4-5", "claude-opus-4-5", "claude-sonnet-4-5", "claude-haiku-4-5", "gemini-3-pro-preview", "gemini-3-pro-preview", "gemini-3-flash-preview", "gemini-3-flash-preview", "gpt-5.1", "gpt-5.1", "gpt-5.1-mini", "gpt-5.1-nano"];
|
|
90
|
-
readonly LARGE: readonly ["claude-opus-4-5", "gemini-3-pro-preview", "gpt-5.1"];
|
|
91
|
-
readonly SMALL: readonly ["claude-sonnet-4-5", "gemini-3-flash-preview", "gpt-5.1-mini"];
|
|
92
|
-
readonly TINY: readonly ["claude-haiku-4-5", "gemini-3-flash-preview", "gpt-5.1-nano"];
|
|
88
|
+
readonly BASE: readonly ["claude-sonnet-4-5", "gemini-3-pro-preview", "gpt-5.2"];
|
|
89
|
+
readonly COMBINED: readonly ["claude-sonnet-4-5", "claude-opus-4-5", "claude-sonnet-4-5", "claude-haiku-4-5", "gemini-3-pro-preview", "gemini-3-pro-preview", "gemini-3-flash-preview", "gemini-3-flash-preview", "gpt-5.2", "gpt-5.2-pro", "gpt-5-mini", "gpt-5-nano"];
|
|
90
|
+
readonly LARGE: readonly ["claude-opus-4-5", "gemini-3-pro-preview", "gpt-5.2-pro"];
|
|
91
|
+
readonly SMALL: readonly ["claude-sonnet-4-5", "gemini-3-flash-preview", "gpt-5-mini"];
|
|
92
|
+
readonly TINY: readonly ["claude-haiku-4-5", "gemini-3-flash-preview", "gpt-5-nano"];
|
|
93
93
|
};
|
package/dist/esm/index.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
export { default as Llm } from "./Llm.js";
|
|
2
2
|
export * as LLM from "./constants.js";
|
|
3
|
-
export type { LlmHistory, LlmInputContent, LlmInputContentFile, LlmInputContentImage, LlmInputContentText, LlmInputMessage, LlmMessageOptions, LlmOperateInput, LlmOperateInputContent, LlmOperateInputFile, LlmOperateInputImage, LlmOperateOptions, LlmOperateResponse, LlmOptions, LlmProvider, } from "./types/LlmProvider.interface.js";
|
|
3
|
+
export type { LlmFallbackConfig, LlmHistory, LlmInputContent, LlmInputContentFile, LlmInputContentImage, LlmInputContentText, LlmInputMessage, LlmMessageOptions, LlmOperateInput, LlmOperateInputContent, LlmOperateInputFile, LlmOperateInputImage, LlmOperateOptions, LlmOperateResponse, LlmOptions, LlmProvider, } from "./types/LlmProvider.interface.js";
|
|
4
4
|
export { LlmMessageRole, LlmMessageType, } from "./types/LlmProvider.interface.js";
|
|
5
5
|
export { isLlmOperateInput, isLlmOperateInputContent, isLlmOperateInputFile, isLlmOperateInputImage, } from "./types/LlmOperateInput.guards.js";
|
|
6
6
|
export type { LlmTool } from "./types/LlmTool.interface.js";
|
package/dist/esm/index.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { ConfigurationError, BadGatewayError, TooManyRequestsError, NotImplementedError } from '@jaypie/errors';
|
|
2
|
+
import log$3, { log as log$2 } from '@jaypie/logger';
|
|
2
3
|
import { z } from 'zod/v4';
|
|
3
4
|
import { placeholders, JAYPIE, resolveValue, sleep } from '@jaypie/kit';
|
|
4
|
-
import { log as log$2 } from '@jaypie/logger';
|
|
5
5
|
import RandomLib from 'random';
|
|
6
6
|
import { RateLimitError, APIConnectionError, APIConnectionTimeoutError, InternalServerError, APIUserAbortError, AuthenticationError, BadRequestError, ConflictError, NotFoundError, PermissionDeniedError, UnprocessableEntityError, OpenAI } from 'openai';
|
|
7
7
|
import { zodResponseFormat } from 'openai/helpers/zod';
|
|
@@ -11,49 +11,43 @@ import { resolve } from 'path';
|
|
|
11
11
|
import { getS3FileBuffer, getEnvSecret } from '@jaypie/aws';
|
|
12
12
|
import { fetchWeatherApi } from 'openmeteo';
|
|
13
13
|
|
|
14
|
-
const
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
// Default uses env var OPENROUTER_MODEL if set, otherwise a reasonable default
|
|
21
|
-
DEFAULT: "z-ai/glm-4.7",
|
|
22
|
-
SMALL: "z-ai/glm-4.7",
|
|
23
|
-
LARGE: "z-ai/glm-4.7",
|
|
24
|
-
TINY: "z-ai/glm-4.7",
|
|
25
|
-
},
|
|
26
|
-
MODEL_MATCH_WORDS: ["openrouter"],
|
|
27
|
-
NAME: "openrouter",
|
|
28
|
-
ROLE: {
|
|
29
|
-
ASSISTANT: "assistant",
|
|
30
|
-
SYSTEM: "system",
|
|
31
|
-
TOOL: "tool",
|
|
32
|
-
USER: "user",
|
|
33
|
-
},
|
|
14
|
+
const FIRST_CLASS_PROVIDER = {
|
|
15
|
+
ANTHROPIC: {
|
|
16
|
+
DEFAULT: "claude-sonnet-4-5",
|
|
17
|
+
LARGE: "claude-opus-4-5",
|
|
18
|
+
SMALL: "claude-sonnet-4-5",
|
|
19
|
+
TINY: "claude-haiku-4-5",
|
|
34
20
|
},
|
|
35
21
|
GEMINI: {
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
22
|
+
DEFAULT: "gemini-3-pro-preview",
|
|
23
|
+
LARGE: "gemini-3-pro-preview",
|
|
24
|
+
SMALL: "gemini-3-flash-preview",
|
|
25
|
+
TINY: "gemini-3-flash-preview",
|
|
26
|
+
},
|
|
27
|
+
OPENAI: {
|
|
28
|
+
DEFAULT: "gpt-5.2",
|
|
29
|
+
LARGE: "gpt-5.2-pro",
|
|
30
|
+
SMALL: "gpt-5-mini",
|
|
31
|
+
TINY: "gpt-5-nano",
|
|
32
|
+
},
|
|
33
|
+
OPENROUTER: {
|
|
34
|
+
DEFAULT: "z-ai/glm-4.7",
|
|
35
|
+
LARGE: "z-ai/glm-4.7",
|
|
36
|
+
SMALL: "z-ai/glm-4.7",
|
|
37
|
+
TINY: "z-ai/glm-4.7",
|
|
49
38
|
},
|
|
39
|
+
};
|
|
40
|
+
const PROVIDER = {
|
|
50
41
|
ANTHROPIC: {
|
|
51
42
|
// https://docs.anthropic.com/en/docs/about-claude/models/overview
|
|
43
|
+
MAX_TOKENS: {
|
|
44
|
+
DEFAULT: 4096,
|
|
45
|
+
},
|
|
52
46
|
MODEL: {
|
|
53
|
-
DEFAULT:
|
|
54
|
-
LARGE:
|
|
55
|
-
SMALL:
|
|
56
|
-
TINY:
|
|
47
|
+
DEFAULT: FIRST_CLASS_PROVIDER.ANTHROPIC.DEFAULT,
|
|
48
|
+
LARGE: FIRST_CLASS_PROVIDER.ANTHROPIC.LARGE,
|
|
49
|
+
SMALL: FIRST_CLASS_PROVIDER.ANTHROPIC.SMALL,
|
|
50
|
+
TINY: FIRST_CLASS_PROVIDER.ANTHROPIC.TINY,
|
|
57
51
|
},
|
|
58
52
|
MODEL_MATCH_WORDS: [
|
|
59
53
|
"anthropic",
|
|
@@ -72,24 +66,56 @@ const PROVIDER = {
|
|
|
72
66
|
SYSTEM: "system",
|
|
73
67
|
USER: "user",
|
|
74
68
|
},
|
|
75
|
-
MAX_TOKENS: {
|
|
76
|
-
DEFAULT: 4096,
|
|
77
|
-
},
|
|
78
69
|
TOOLS: {
|
|
79
70
|
SCHEMA_VERSION: "v2",
|
|
80
71
|
},
|
|
81
72
|
},
|
|
73
|
+
GEMINI: {
|
|
74
|
+
// https://ai.google.dev/gemini-api/docs/models
|
|
75
|
+
MODEL: {
|
|
76
|
+
DEFAULT: FIRST_CLASS_PROVIDER.GEMINI.DEFAULT,
|
|
77
|
+
LARGE: FIRST_CLASS_PROVIDER.GEMINI.LARGE,
|
|
78
|
+
SMALL: FIRST_CLASS_PROVIDER.GEMINI.SMALL,
|
|
79
|
+
TINY: FIRST_CLASS_PROVIDER.GEMINI.TINY,
|
|
80
|
+
},
|
|
81
|
+
MODEL_MATCH_WORDS: ["gemini", "google"],
|
|
82
|
+
NAME: "gemini",
|
|
83
|
+
ROLE: {
|
|
84
|
+
MODEL: "model",
|
|
85
|
+
USER: "user",
|
|
86
|
+
},
|
|
87
|
+
},
|
|
82
88
|
OPENAI: {
|
|
83
89
|
// https://platform.openai.com/docs/models
|
|
84
90
|
MODEL: {
|
|
85
|
-
DEFAULT:
|
|
86
|
-
LARGE:
|
|
87
|
-
SMALL:
|
|
88
|
-
TINY:
|
|
91
|
+
DEFAULT: FIRST_CLASS_PROVIDER.OPENAI.DEFAULT,
|
|
92
|
+
LARGE: FIRST_CLASS_PROVIDER.OPENAI.LARGE,
|
|
93
|
+
SMALL: FIRST_CLASS_PROVIDER.OPENAI.SMALL,
|
|
94
|
+
TINY: FIRST_CLASS_PROVIDER.OPENAI.TINY,
|
|
89
95
|
},
|
|
90
96
|
MODEL_MATCH_WORDS: ["openai", "gpt", /^o\d/],
|
|
91
97
|
NAME: "openai",
|
|
92
98
|
},
|
|
99
|
+
OPENROUTER: {
|
|
100
|
+
// https://openrouter.ai/models
|
|
101
|
+
// OpenRouter provides access to hundreds of models from various providers
|
|
102
|
+
// The model format is: provider/model-name (e.g., "openai/gpt-4", "anthropic/claude-3-opus")
|
|
103
|
+
MODEL: {
|
|
104
|
+
// Default uses env var OPENROUTER_MODEL if set, otherwise a reasonable default
|
|
105
|
+
DEFAULT: FIRST_CLASS_PROVIDER.OPENROUTER.DEFAULT,
|
|
106
|
+
LARGE: FIRST_CLASS_PROVIDER.OPENROUTER.LARGE,
|
|
107
|
+
SMALL: FIRST_CLASS_PROVIDER.OPENROUTER.SMALL,
|
|
108
|
+
TINY: FIRST_CLASS_PROVIDER.OPENROUTER.TINY,
|
|
109
|
+
},
|
|
110
|
+
MODEL_MATCH_WORDS: ["openrouter"],
|
|
111
|
+
NAME: "openrouter",
|
|
112
|
+
ROLE: {
|
|
113
|
+
ASSISTANT: "assistant",
|
|
114
|
+
SYSTEM: "system",
|
|
115
|
+
TOOL: "tool",
|
|
116
|
+
USER: "user",
|
|
117
|
+
},
|
|
118
|
+
},
|
|
93
119
|
};
|
|
94
120
|
// Last: Defaults
|
|
95
121
|
const DEFAULT = {
|
|
@@ -5183,7 +5209,7 @@ class OpenRouterProvider {
|
|
|
5183
5209
|
|
|
5184
5210
|
class Llm {
|
|
5185
5211
|
constructor(providerName = DEFAULT.PROVIDER.NAME, options = {}) {
|
|
5186
|
-
const { model } = options;
|
|
5212
|
+
const { fallback, model } = options;
|
|
5187
5213
|
let finalProvider = providerName;
|
|
5188
5214
|
let finalModel = model;
|
|
5189
5215
|
if (model) {
|
|
@@ -5212,6 +5238,7 @@ class Llm {
|
|
|
5212
5238
|
finalModel = undefined;
|
|
5213
5239
|
}
|
|
5214
5240
|
}
|
|
5241
|
+
this._fallbackConfig = fallback;
|
|
5215
5242
|
this._provider = finalProvider;
|
|
5216
5243
|
this._options = { ...options, model: finalModel };
|
|
5217
5244
|
this._llm = this.createProvider(finalProvider, this._options);
|
|
@@ -5240,11 +5267,81 @@ class Llm {
|
|
|
5240
5267
|
async send(message, options) {
|
|
5241
5268
|
return this._llm.send(message, options);
|
|
5242
5269
|
}
|
|
5270
|
+
/**
|
|
5271
|
+
* Resolves the fallback chain from instance config and per-call options.
|
|
5272
|
+
* Per-call options take precedence over instance config.
|
|
5273
|
+
* Returns empty array if fallback is disabled.
|
|
5274
|
+
*/
|
|
5275
|
+
resolveFallbackChain(options) {
|
|
5276
|
+
// Per-call `fallback: false` disables fallback entirely
|
|
5277
|
+
if (options.fallback === false) {
|
|
5278
|
+
return [];
|
|
5279
|
+
}
|
|
5280
|
+
// Per-call fallback array overrides instance config
|
|
5281
|
+
if (Array.isArray(options.fallback)) {
|
|
5282
|
+
return options.fallback;
|
|
5283
|
+
}
|
|
5284
|
+
// Use instance config if available
|
|
5285
|
+
return this._fallbackConfig || [];
|
|
5286
|
+
}
|
|
5287
|
+
/**
|
|
5288
|
+
* Creates a fallback Llm instance lazily when needed.
|
|
5289
|
+
*/
|
|
5290
|
+
createFallbackInstance(config) {
|
|
5291
|
+
return new Llm(config.provider, {
|
|
5292
|
+
apiKey: config.apiKey,
|
|
5293
|
+
model: config.model,
|
|
5294
|
+
});
|
|
5295
|
+
}
|
|
5243
5296
|
async operate(input, options = {}) {
|
|
5244
5297
|
if (!this._llm.operate) {
|
|
5245
5298
|
throw new NotImplementedError(`Provider ${this._provider} does not support operate method`);
|
|
5246
5299
|
}
|
|
5247
|
-
|
|
5300
|
+
const fallbackChain = this.resolveFallbackChain(options);
|
|
5301
|
+
const optionsWithoutFallback = { ...options, fallback: false };
|
|
5302
|
+
let lastError;
|
|
5303
|
+
let attempts = 0;
|
|
5304
|
+
// Try primary provider first
|
|
5305
|
+
attempts++;
|
|
5306
|
+
try {
|
|
5307
|
+
const response = await this._llm.operate(input, optionsWithoutFallback);
|
|
5308
|
+
return {
|
|
5309
|
+
...response,
|
|
5310
|
+
fallbackAttempts: attempts,
|
|
5311
|
+
fallbackUsed: false,
|
|
5312
|
+
provider: response.provider || this._provider,
|
|
5313
|
+
};
|
|
5314
|
+
}
|
|
5315
|
+
catch (error) {
|
|
5316
|
+
lastError = error;
|
|
5317
|
+
log$3.warn(`Provider ${this._provider} failed`, {
|
|
5318
|
+
error: lastError.message,
|
|
5319
|
+
fallbacksRemaining: fallbackChain.length,
|
|
5320
|
+
});
|
|
5321
|
+
}
|
|
5322
|
+
// Try fallback providers
|
|
5323
|
+
for (const fallbackConfig of fallbackChain) {
|
|
5324
|
+
attempts++;
|
|
5325
|
+
try {
|
|
5326
|
+
const fallbackInstance = this.createFallbackInstance(fallbackConfig);
|
|
5327
|
+
const response = await fallbackInstance.operate(input, optionsWithoutFallback);
|
|
5328
|
+
return {
|
|
5329
|
+
...response,
|
|
5330
|
+
fallbackAttempts: attempts,
|
|
5331
|
+
fallbackUsed: true,
|
|
5332
|
+
provider: response.provider || fallbackConfig.provider,
|
|
5333
|
+
};
|
|
5334
|
+
}
|
|
5335
|
+
catch (error) {
|
|
5336
|
+
lastError = error;
|
|
5337
|
+
log$3.warn(`Fallback provider ${fallbackConfig.provider} failed`, {
|
|
5338
|
+
error: lastError.message,
|
|
5339
|
+
fallbacksRemaining: fallbackChain.length - attempts + 1,
|
|
5340
|
+
});
|
|
5341
|
+
}
|
|
5342
|
+
}
|
|
5343
|
+
// All providers failed, throw the last error
|
|
5344
|
+
throw lastError;
|
|
5248
5345
|
}
|
|
5249
5346
|
async *stream(input, options = {}) {
|
|
5250
5347
|
if (!this._llm.stream) {
|
|
@@ -5258,7 +5355,7 @@ class Llm {
|
|
|
5258
5355
|
return instance.send(message, messageOptions);
|
|
5259
5356
|
}
|
|
5260
5357
|
static async operate(input, options) {
|
|
5261
|
-
const { llm, apiKey, model, ...operateOptions } = options || {};
|
|
5358
|
+
const { apiKey, fallback, llm, model, ...operateOptions } = options || {};
|
|
5262
5359
|
let finalLlm = llm;
|
|
5263
5360
|
let finalModel = model;
|
|
5264
5361
|
if (!llm && model) {
|
|
@@ -5275,8 +5372,18 @@ class Llm {
|
|
|
5275
5372
|
finalModel = undefined;
|
|
5276
5373
|
}
|
|
5277
5374
|
}
|
|
5278
|
-
|
|
5279
|
-
|
|
5375
|
+
// Resolve fallback for static method: pass to instance if array, pass to operate options if false
|
|
5376
|
+
const instanceFallback = Array.isArray(fallback) ? fallback : undefined;
|
|
5377
|
+
const operateFallback = fallback === false ? false : undefined;
|
|
5378
|
+
const instance = new Llm(finalLlm, {
|
|
5379
|
+
apiKey,
|
|
5380
|
+
fallback: instanceFallback,
|
|
5381
|
+
model: finalModel,
|
|
5382
|
+
});
|
|
5383
|
+
return instance.operate(input, {
|
|
5384
|
+
...operateOptions,
|
|
5385
|
+
...(operateFallback !== undefined && { fallback: operateFallback }),
|
|
5386
|
+
});
|
|
5280
5387
|
}
|
|
5281
5388
|
static stream(input, options) {
|
|
5282
5389
|
const { llm, apiKey, model, ...streamOptions } = options || {};
|